From dbc9d602608864f16bf59741114f4c05119b73c6 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 15 Jun 2018 22:14:28 +0200 Subject: [PATCH 01/92] Support for remote path in reindex api (#31290) Support for remote path in reindex api Closes #22913 --- docs/reference/docs/reindex.asciidoc | 10 +-- .../index/reindex/RestReindexAction.java | 9 ++- .../index/reindex/TransportReindexAction.java | 62 ++++++++++--------- ...ReindexFromRemoteBuildRestClientTests.java | 28 +++++---- .../ReindexFromRemoteWhitelistTests.java | 4 +- .../ReindexFromRemoteWithAuthTests.java | 5 +- .../ReindexSourceTargetValidationTests.java | 4 +- .../index/reindex/RestReindexActionTests.java | 23 +++++++ .../index/reindex/RetryTests.java | 6 +- .../index/reindex/RoundTripTests.java | 5 +- .../index/reindex/remote/RemoteInfoTests.java | 16 +++-- .../index/reindex/RemoteInfo.java | 25 +++++++- .../index/reindex/ReindexRequestTests.java | 12 ++-- 13 files changed, 136 insertions(+), 73 deletions(-) diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index f05acab559ce1..bdbffb0a08d5d 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -422,11 +422,11 @@ POST _reindex // TEST[s/"username": "user",//] // TEST[s/"password": "pass"//] -The `host` parameter must contain a scheme, host, and port (e.g. -`https://otherhost:9200`). The `username` and `password` parameters are -optional, and when they are present `_reindex` will connect to the remote -Elasticsearch node using basic auth. Be sure to use `https` when using -basic auth or the password will be sent in plain text. +The `host` parameter must contain a scheme, host, port (e.g. +`https://otherhost:9200`) and optional path (e.g. `https://otherhost:9200/proxy`). +The `username` and `password` parameters are optional, and when they are present `_reindex` +will connect to the remote Elasticsearch node using basic auth. Be sure to use `https` when +using basic auth or the password will be sent in plain text. Remote hosts have to be explicitly whitelisted in elasticsearch.yaml using the `reindex.remote.whitelist` property. 
It can be set to a comma delimited list diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index f1ac681b59fdf..a5520c90b0ff5 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -57,7 +57,7 @@ */ public class RestReindexAction extends AbstractBaseReindexRestHandler { static final ObjectParser PARSER = new ObjectParser<>("reindex"); - private static final Pattern HOST_PATTERN = Pattern.compile("(?[^:]+)://(?[^:]+):(?\\d+)"); + private static final Pattern HOST_PATTERN = Pattern.compile("(?[^:]+)://(?[^:]+):(?\\d+)(?/.*)?"); static { ObjectParser.Parser sourceParser = (parser, request, context) -> { @@ -139,10 +139,12 @@ static RemoteInfo buildRemoteInfo(Map source) throws IOException String hostInRequest = requireNonNull(extractString(remote, "host"), "[host] must be specified to reindex from a remote cluster"); Matcher hostMatcher = HOST_PATTERN.matcher(hostInRequest); if (false == hostMatcher.matches()) { - throw new IllegalArgumentException("[host] must be of the form [scheme]://[host]:[port] but was [" + hostInRequest + "]"); + throw new IllegalArgumentException("[host] must be of the form [scheme]://[host]:[port](/[pathPrefix])? but was [" + + hostInRequest + "]"); } String scheme = hostMatcher.group("scheme"); String host = hostMatcher.group("host"); + String pathPrefix = hostMatcher.group("pathPrefix"); int port = Integer.parseInt(hostMatcher.group("port")); Map headers = extractStringStringMap(remote, "headers"); TimeValue socketTimeout = extractTimeValue(remote, "socket_timeout", RemoteInfo.DEFAULT_SOCKET_TIMEOUT); @@ -151,7 +153,8 @@ static RemoteInfo buildRemoteInfo(Map source) throws IOException throw new IllegalArgumentException( "Unsupported fields in [remote]: [" + Strings.collectionToCommaDelimitedString(remote.keySet()) + "]"); } - return new RemoteInfo(scheme, host, port, queryForRemote(source), username, password, headers, socketTimeout, connectTimeout); + return new RemoteInfo(scheme, host, port, pathPrefix, queryForRemote(source), + username, password, headers, socketTimeout, connectTimeout); } /** diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 6f839558e03cc..5e0ad0fd3fdd8 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -37,6 +37,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; +import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; @@ -206,34 +207,39 @@ static RestClient buildRestClient(RemoteInfo remoteInfo, long taskId, List header : remoteInfo.getHeaders().entrySet()) { clientHeaders[i++] = new BasicHeader(header.getKey(), header.getValue()); } - return RestClient.builder(new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme())) - 
.setDefaultHeaders(clientHeaders) - .setRequestConfigCallback(c -> { - c.setConnectTimeout(Math.toIntExact(remoteInfo.getConnectTimeout().millis())); - c.setSocketTimeout(Math.toIntExact(remoteInfo.getSocketTimeout().millis())); - return c; - }) - .setHttpClientConfigCallback(c -> { - // Enable basic auth if it is configured - if (remoteInfo.getUsername() != null) { - UsernamePasswordCredentials creds = new UsernamePasswordCredentials(remoteInfo.getUsername(), - remoteInfo.getPassword()); - CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, creds); - c.setDefaultCredentialsProvider(credentialsProvider); - } - // Stick the task id in the thread name so we can track down tasks from stack traces - AtomicInteger threads = new AtomicInteger(); - c.setThreadFactory(r -> { - String name = "es-client-" + taskId + "-" + threads.getAndIncrement(); - Thread t = new Thread(r, name); - threadCollector.add(t); - return t; - }); - // Limit ourselves to one reactor thread because for now the search process is single threaded. - c.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build()); - return c; - }).build(); + final RestClientBuilder builder = + RestClient.builder(new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme())) + .setDefaultHeaders(clientHeaders) + .setRequestConfigCallback(c -> { + c.setConnectTimeout(Math.toIntExact(remoteInfo.getConnectTimeout().millis())); + c.setSocketTimeout(Math.toIntExact(remoteInfo.getSocketTimeout().millis())); + return c; + }) + .setHttpClientConfigCallback(c -> { + // Enable basic auth if it is configured + if (remoteInfo.getUsername() != null) { + UsernamePasswordCredentials creds = new UsernamePasswordCredentials(remoteInfo.getUsername(), + remoteInfo.getPassword()); + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, creds); + c.setDefaultCredentialsProvider(credentialsProvider); + } + // Stick the task id in the thread name so we can track down tasks from stack traces + AtomicInteger threads = new AtomicInteger(); + c.setThreadFactory(r -> { + String name = "es-client-" + taskId + "-" + threads.getAndIncrement(); + Thread t = new Thread(r, name); + threadCollector.add(t); + return t; + }); + // Limit ourselves to one reactor thread because for now the search process is single threaded. 
+ c.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build()); + return c; + }); + if (Strings.hasLength(remoteInfo.getPathPrefix()) && "/".equals(remoteInfo.getPathPrefix()) == false) { + builder.setPathPrefix(remoteInfo.getPathPrefix()); + } + return builder.build(); } /** diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java index 14e3142d226c9..db32e4813b316 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java @@ -34,20 +34,22 @@ public class ReindexFromRemoteBuildRestClientTests extends RestClientBuilderTestCase { public void testBuildRestClient() throws Exception { - RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, new BytesArray("ignored"), null, null, emptyMap(), + for(final String path: new String[]{"", null, "/", "path"}) { + RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, path, new BytesArray("ignored"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); - long taskId = randomLong(); - List threads = synchronizedList(new ArrayList<>()); - RestClient client = TransportReindexAction.buildRestClient(remoteInfo, taskId, threads); - try { - assertBusy(() -> assertThat(threads, hasSize(2))); - int i = 0; - for (Thread thread : threads) { - assertEquals("es-client-" + taskId + "-" + i, thread.getName()); - i++; + long taskId = randomLong(); + List threads = synchronizedList(new ArrayList<>()); + RestClient client = TransportReindexAction.buildRestClient(remoteInfo, taskId, threads); + try { + assertBusy(() -> assertThat(threads, hasSize(2))); + int i = 0; + for (Thread thread : threads) { + assertEquals("es-client-" + taskId + "-" + i, thread.getName()); + i++; + } + } finally { + client.close(); } - } finally { - client.close(); } } @@ -57,7 +59,7 @@ public void testHeaders() throws Exception { for (int i = 0; i < numHeaders; ++i) { headers.put("header" + i, Integer.toString(i)); } - RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, new BytesArray("ignored"), null, null, + RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, null, new BytesArray("ignored"), null, null, headers, RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); long taskId = randomLong(); List threads = synchronizedList(new ArrayList<>()); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java index 128cd4043e283..e32370b166546 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java @@ -49,7 +49,7 @@ public void testLocalRequestWithWhitelist() { * Build a {@link RemoteInfo}, defaulting values that we don't care about in this test to values that don't hurt anything. 
*/ private RemoteInfo newRemoteInfo(String host, int port) { - return new RemoteInfo(randomAlphaOfLength(5), host, port, new BytesArray("test"), null, null, emptyMap(), + return new RemoteInfo(randomAlphaOfLength(5), host, port, null, new BytesArray("test"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); } @@ -63,7 +63,7 @@ public void testWhitelistedRemote() { public void testWhitelistedByPrefix() { checkRemoteWhitelist(buildRemoteWhitelist(singletonList("*.example.com:9200")), - new RemoteInfo(randomAlphaOfLength(5), "es.example.com", 9200, new BytesArray("test"), null, null, emptyMap(), + new RemoteInfo(randomAlphaOfLength(5), "es.example.com", 9200, null, new BytesArray("test"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT)); checkRemoteWhitelist(buildRemoteWhitelist(singletonList("*.example.com:9200")), newRemoteInfo("6e134134a1.us-east-1.aws.example.com", 9200)); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java index 72ba651dff9ae..dc6d5eac58897 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java @@ -104,8 +104,9 @@ public void fetchTransportAddress() { * Build a {@link RemoteInfo}, defaulting values that we don't care about in this test to values that don't hurt anything. */ private RemoteInfo newRemoteInfo(String username, String password, Map headers) { - return new RemoteInfo("http", address.getAddress(), address.getPort(), new BytesArray("{\"match_all\":{}}"), username, password, - headers, RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); + return new RemoteInfo("http", address.getAddress(), address.getPort(), null, + new BytesArray("{\"match_all\":{}}"), username, password, headers, + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); } public void testReindexFromRemoteWithAuthentication() throws Exception { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java index 28b9febe1c289..4784d7f5fe546 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java @@ -88,10 +88,10 @@ public void testTargetIsAlias() { public void testRemoteInfoSkipsValidation() { // The index doesn't have to exist - succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, new BytesArray("test"), null, null, emptyMap(), + succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, null, new BytesArray("test"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT), "does_not_exist", "target"); // And it doesn't matter if they are the same index. They are considered to be different because the remote one is, well, remote. 
- succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, new BytesArray("test"), null, null, emptyMap(), + succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, null, new BytesArray("test"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT), "target", "target"); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java index 88fa31f423a21..b06948b90581a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java @@ -89,6 +89,7 @@ public void testBuildRemoteInfoWithAllHostParts() throws IOException { assertEquals("http", info.getScheme()); assertEquals("example.com", info.getHost()); assertEquals(9200, info.getPort()); + assertNull(info.getPathPrefix()); assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout()); // Didn't set the timeout so we should get the default assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout()); // Didn't set the timeout so we should get the default @@ -96,8 +97,30 @@ public void testBuildRemoteInfoWithAllHostParts() throws IOException { assertEquals("https", info.getScheme()); assertEquals("other.example.com", info.getHost()); assertEquals(9201, info.getPort()); + assertNull(info.getPathPrefix()); assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout()); assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout()); + + info = buildRemoteInfoHostTestCase("https://other.example.com:9201/"); + assertEquals("https", info.getScheme()); + assertEquals("other.example.com", info.getHost()); + assertEquals(9201, info.getPort()); + assertEquals("/", info.getPathPrefix()); + assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout()); + assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout()); + + info = buildRemoteInfoHostTestCase("https://other.example.com:9201/proxy-path/"); + assertEquals("https", info.getScheme()); + assertEquals("other.example.com", info.getHost()); + assertEquals(9201, info.getPort()); + assertEquals("/proxy-path/", info.getPathPrefix()); + assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout()); + assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout()); + + final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> buildRemoteInfoHostTestCase("https")); + assertEquals("[host] must be of the form [scheme]://[host]:[port](/[pathPrefix])? 
but was [https]", + exception.getMessage()); } public void testReindexFromRemoteRequestParsing() throws IOException { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index 1107a36086927..9ea2a24bfb136 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -124,8 +124,10 @@ public void testReindexFromRemote() throws Exception { assertNotNull(masterNode); TransportAddress address = masterNode.getHttp().getAddress().publishAddress(); - RemoteInfo remote = new RemoteInfo("http", address.getAddress(), address.getPort(), new BytesArray("{\"match_all\":{}}"), null, - null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); + RemoteInfo remote = + new RemoteInfo("http", address.getAddress(), address.getPort(), null, + new BytesArray("{\"match_all\":{}}"), null, null, emptyMap(), + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); ReindexRequestBuilder request = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source("source").destination("dest") .setRemoteInfo(remote); return request; diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index 946ab030c8285..2dc4b59e8d9f9 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -63,8 +63,9 @@ public void testReindexRequest() throws IOException { } TimeValue socketTimeout = parseTimeValue(randomPositiveTimeValue(), "socketTimeout"); TimeValue connectTimeout = parseTimeValue(randomPositiveTimeValue(), "connectTimeout"); - reindex.setRemoteInfo(new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), port, query, username, password, headers, - socketTimeout, connectTimeout)); + reindex.setRemoteInfo( + new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), port, null, + query, username, password, headers, socketTimeout, connectTimeout)); } ReindexRequest tripped = new ReindexRequest(); roundTrip(reindex, tripped); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java index d6ab599b43c2d..de0ade9c47cc3 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java @@ -26,17 +26,21 @@ import static java.util.Collections.emptyMap; public class RemoteInfoTests extends ESTestCase { - private RemoteInfo newRemoteInfo(String scheme, String username, String password) { - return new RemoteInfo(scheme, "testhost", 12344, new BytesArray("testquery"), username, password, emptyMap(), + private RemoteInfo newRemoteInfo(String scheme, String prefixPath, String username, String password) { + return new RemoteInfo(scheme, "testhost", 12344, prefixPath, new BytesArray("testquery"), username, password, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); } public void testToString() { - assertEquals("host=testhost port=12344 query=testquery", newRemoteInfo("http", null, null).toString()); - 
assertEquals("host=testhost port=12344 query=testquery username=testuser", newRemoteInfo("http", "testuser", null).toString()); + assertEquals("host=testhost port=12344 query=testquery", + newRemoteInfo("http", null, null, null).toString()); + assertEquals("host=testhost port=12344 query=testquery username=testuser", + newRemoteInfo("http", null, "testuser", null).toString()); assertEquals("host=testhost port=12344 query=testquery username=testuser password=<<>>", - newRemoteInfo("http", "testuser", "testpass").toString()); + newRemoteInfo("http", null, "testuser", "testpass").toString()); assertEquals("scheme=https host=testhost port=12344 query=testquery username=testuser password=<<>>", - newRemoteInfo("https", "testuser", "testpass").toString()); + newRemoteInfo("https", null, "testuser", "testpass").toString()); + assertEquals("scheme=https host=testhost port=12344 pathPrefix=prxy query=testquery username=testuser password=<<>>", + newRemoteInfo("https", "prxy", "testuser", "testpass").toString()); } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java index 8e7a990902631..494658821cfd5 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java @@ -48,6 +48,7 @@ public class RemoteInfo implements Writeable { private final String scheme; private final String host; private final int port; + private final String pathPrefix; private final BytesReference query; private final String username; private final String password; @@ -61,11 +62,12 @@ public class RemoteInfo implements Writeable { */ private final TimeValue connectTimeout; - public RemoteInfo(String scheme, String host, int port, BytesReference query, String username, String password, - Map headers, TimeValue socketTimeout, TimeValue connectTimeout) { + public RemoteInfo(String scheme, String host, int port, String pathPrefix, BytesReference query, String username, String password, + Map headers, TimeValue socketTimeout, TimeValue connectTimeout) { this.scheme = requireNonNull(scheme, "[scheme] must be specified to reindex from a remote cluster"); this.host = requireNonNull(host, "[host] must be specified to reindex from a remote cluster"); this.port = port; + this.pathPrefix = pathPrefix; this.query = requireNonNull(query, "[query] must be specified to reindex from a remote cluster"); this.username = username; this.password = password; @@ -97,6 +99,11 @@ public RemoteInfo(StreamInput in) throws IOException { socketTimeout = DEFAULT_SOCKET_TIMEOUT; connectTimeout = DEFAULT_CONNECT_TIMEOUT; } + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + pathPrefix = in.readOptionalString(); + } else { + pathPrefix = null; + } } @Override @@ -116,6 +123,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeTimeValue(socketTimeout); out.writeTimeValue(connectTimeout); } + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeOptionalString(pathPrefix); + } } public String getScheme() { @@ -130,6 +140,11 @@ public int getPort() { return port; } + @Nullable + public String getPathPrefix() { + return pathPrefix; + } + public BytesReference getQuery() { return query; } @@ -169,7 +184,11 @@ public String toString() { // http is the default so it isn't worth taking up space if it is the scheme b.append("scheme=").append(scheme).append(' '); } - b.append("host=").append(host).append(" port=").append(port).append(" 
query=").append(query.utf8ToString()); + b.append("host=").append(host).append(" port=").append(port); + if (pathPrefix != null) { + b.append(" pathPrefix=").append(pathPrefix); + } + b.append(" query=").append(query.utf8ToString()); if (username != null) { b.append(" username=").append(username); } diff --git a/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java index 9f4b20ff35ba3..6c1988a1440e9 100644 --- a/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java @@ -37,8 +37,9 @@ public class ReindexRequestTests extends AbstractBulkByScrollRequestTestCase Date: Fri, 15 Jun 2018 21:15:35 +0100 Subject: [PATCH 02/92] [ML] Put ML filter API response should contain the filter (#31362) --- .../xpack/core/ml/action/PutFilterAction.java | 51 +++++++++++++++++-- .../PutCalendarActionResponseTests.java | 13 ++++- .../action/PutFilterActionResponseTests.java | 31 +++++++++++ .../ml/action/TransportPutFilterAction.java | 2 +- .../rest-api-spec/test/ml/filter_crud.yml | 6 ++- .../ml/integration/DetectionRulesIT.java | 7 ++- .../MlNativeAutodetectIntegTestCase.java | 5 +- 7 files changed, 100 insertions(+), 15 deletions(-) create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionResponseTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java index 2f7606795f001..8269a105b6463 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -115,10 +115,53 @@ public RequestBuilder(ElasticsearchClient client) { } } - public static class Response extends AcknowledgedResponse { + public static class Response extends ActionResponse implements ToXContentObject { - public Response() { - super(true); + private MlFilter filter; + + Response() { + } + + public Response(MlFilter filter) { + this.filter = filter; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + filter = new MlFilter(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + filter.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return filter.toXContent(builder, params); + } + + public MlFilter getFilter() { + return filter; + } + + @Override + public int hashCode() { + return Objects.hash(filter); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = (Response) obj; + return Objects.equals(filter, other.filter); } } } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutCalendarActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutCalendarActionResponseTests.java index 941de884554bf..77d4d788db620 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutCalendarActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutCalendarActionResponseTests.java @@ -5,10 +5,14 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.calendars.CalendarTests; -public class PutCalendarActionResponseTests extends AbstractStreamableTestCase { +import java.io.IOException; + +public class PutCalendarActionResponseTests extends AbstractStreamableXContentTestCase { @Override protected PutCalendarAction.Response createTestInstance() { @@ -19,4 +23,9 @@ protected PutCalendarAction.Response createTestInstance() { protected PutCalendarAction.Response createBlankInstance() { return new PutCalendarAction.Response(); } + + @Override + protected PutCalendarAction.Response doParseInstance(XContentParser parser) throws IOException { + return new PutCalendarAction.Response(Calendar.LENIENT_PARSER.parse(parser, null).build()); + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionResponseTests.java new file mode 100644 index 0000000000000..1e697f5172a4a --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionResponseTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.config.MlFilterTests; + +import java.io.IOException; + +public class PutFilterActionResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected PutFilterAction.Response createTestInstance() { + return new PutFilterAction.Response(MlFilterTests.createRandom()); + } + + @Override + protected PutFilterAction.Response createBlankInstance() { + return new PutFilterAction.Response(); + } + + @Override + protected PutFilterAction.Response doParseInstance(XContentParser parser) throws IOException { + return new PutFilterAction.Response(MlFilter.LENIENT_PARSER.parse(parser, null).build()); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index f7ac11e2d1aec..fc14ef085dd33 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -69,7 +69,7 @@ protected void doExecute(PutFilterAction.Request request, ActionListener { + "description": "A newly created filter", "items": ["abc", "xyz"] } - - match: { acknowledged: true } + - match: { filter_id: filter-foo2 } + - match: { description: "A newly created filter" } + - match: { items: ["abc", "xyz"]} - do: xpack.ml.get_filters: @@ -128,6 +131,7 @@ setup: - match: filters.0: filter_id: "filter-foo2" + description: "A newly created filter" items: ["abc", "xyz"] --- diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java index b99170546df3b..fbda8ad716b2c 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java @@ -35,7 +35,6 @@ import java.util.Set; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.isOneOf; /** @@ -121,7 +120,7 @@ public void testCondition() throws Exception { public void testScope() throws Exception { MlFilter safeIps = MlFilter.builder("safe_ips").setItems("111.111.111.111", "222.222.222.222").build(); - assertThat(putMlFilter(safeIps), is(true)); + assertThat(putMlFilter(safeIps).getFilter(), equalTo(safeIps)); DetectionRule rule = new DetectionRule.Builder(RuleScope.builder().include("ip", "safe_ips")).build(); @@ -179,7 +178,7 @@ public void testScope() throws Exception { // Now let's update the filter MlFilter updatedFilter = MlFilter.builder(safeIps.getId()).setItems("333.333.333.333").build(); - assertThat(putMlFilter(updatedFilter), is(true)); + assertThat(putMlFilter(updatedFilter).getFilter(), equalTo(updatedFilter)); // Wait until the notification that the process was updated is indexed assertBusy(() -> { @@ -230,7 +229,7 @@ public void testScopeAndCondition() throws IOException { // We have 2 IPs and they're both safe-listed. 
List ips = Arrays.asList("111.111.111.111", "222.222.222.222"); MlFilter safeIps = MlFilter.builder("safe_ips").setItems(ips).build(); - assertThat(putMlFilter(safeIps), is(true)); + assertThat(putMlFilter(safeIps).getFilter(), equalTo(safeIps)); // Ignore if ip in safe list AND actual < 10. DetectionRule rule = new DetectionRule.Builder(RuleScope.builder().include("ip", "safe_ips")) diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 9057db476ad77..4e6fb03497e6a 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -419,9 +419,8 @@ protected List getForecasts(String jobId, ForecastRequestStats forecas return forecasts; } - protected boolean putMlFilter(MlFilter filter) { - PutFilterAction.Response response = client().execute(PutFilterAction.INSTANCE, new PutFilterAction.Request(filter)).actionGet(); - return response.isAcknowledged(); + protected PutFilterAction.Response putMlFilter(MlFilter filter) { + return client().execute(PutFilterAction.INSTANCE, new PutFilterAction.Request(filter)).actionGet(); } protected PutCalendarAction.Response putCalendar(String calendarId, List jobIds, String description) { From babb16d90cdc05b1b46d91948f7d6bf38a49bf6e Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 15 Jun 2018 22:24:47 +0200 Subject: [PATCH 03/92] Support for remote path in reindex api - post backport fix Closes #22913 --- .../main/java/org/elasticsearch/index/reindex/RemoteInfo.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java index 494658821cfd5..70f79a9def605 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java @@ -99,7 +99,7 @@ public RemoteInfo(StreamInput in) throws IOException { socketTimeout = DEFAULT_SOCKET_TIMEOUT; connectTimeout = DEFAULT_CONNECT_TIMEOUT; } - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { pathPrefix = in.readOptionalString(); } else { pathPrefix = null; @@ -123,7 +123,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeTimeValue(socketTimeout); out.writeTimeValue(connectTimeout); } - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeOptionalString(pathPrefix); } } From c3084a332b6d2f34fa069d969932d53aacf1e5f7 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Fri, 15 Jun 2018 19:07:47 -0400 Subject: [PATCH 04/92] SQL: Fix rest endpoint names in node stats (#31371) Fixes wrong name for the sql translate endpoint and makes rest endpoint names in stats more consistent. 
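To make the effect of the duplicated handler name concrete: node-level REST usage stats are keyed by the value each handler returns from getName(), so two handlers reporting the same string end up counted in a single bucket. The stand-alone sketch below is a hypothetical, simplified illustration of that bookkeeping (a plain map and class names of my choosing, not the actual Elasticsearch usage-tracking code); only the handler-name strings are taken from the patch that follows.

["source","java"]
----
import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch: usage counters keyed by REST handler name.
final class RestUsageSketch {
    private final Map<String, Long> usage = new HashMap<>();

    void record(String handlerName) {
        // one bucket per distinct handler name
        usage.merge(handlerName, 1L, Long::sum);
    }

    public static void main(String[] args) {
        RestUsageSketch stats = new RestUsageSketch();
        // Before this patch both the translate and the clear-cursor handlers
        // reported "sql_translate_action", so their requests were indistinguishable.
        stats.record("sql_translate_action");   // _sql/translate request
        stats.record("sql_translate_action");   // clear-cursor request, same bucket
        // After this patch each handler reports a distinct, consistent name.
        stats.record("xpack_sql_query_action");
        stats.record("xpack_sql_translate_action");
        stats.record("xpack_sql_clear_cursor_action");
        stats.usage.forEach((name, count) -> System.out.println(name + " -> " + count));
    }
}
----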
--- .../xpack/sql/plugin/RestSqlClearCursorAction.java | 2 +- .../org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java | 2 +- .../elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java index 534d0459180e0..175b78d4f6655 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java @@ -37,6 +37,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli @Override public String getName() { - return "sql_translate_action"; + return "xpack_sql_clear_cursor_action"; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java index 9e34a3fb2e097..a8daa1136d390 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java @@ -114,6 +114,6 @@ public RestResponse buildResponse(SqlQueryResponse response) throws Exception { @Override public String getName() { - return "xpack_sql_action"; + return "xpack_sql_query_action"; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java index 503ee84314820..74d94e4800606 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java @@ -40,7 +40,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli @Override public String getName() { - return "sql_translate_action"; + return "xpack_sql_translate_action"; } } From 215c5f292a9b954fa7ccfe3f29540875076ae1ae Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Sun, 17 Jun 2018 00:14:59 +0300 Subject: [PATCH 05/92] [DOCS] Improve install and setup section for SQL JDBC --- x-pack/docs/en/sql/endpoints/jdbc.asciidoc | 133 ++++++++++++++++++++- 1 file changed, 129 insertions(+), 4 deletions(-) diff --git a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc index 9ac197048ddae..067a4c586fb8e 100644 --- a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc +++ b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc @@ -6,10 +6,133 @@ Elasticsearch's SQL jdbc driver is a rich, fully featured JDBC driver for Elasti It is Type 4 driver, meaning it is a platform independent, stand-alone, Direct to Database, pure Java driver that converts JDBC calls to Elasticsearch SQL. -// TODO add example of resolving the artifact in maven and gradle. 
+[float] +=== Installation -You can connect to it using the two APIs offered -by JDBC, namely `java.sql.Driver` and `DriverManager`: +The JDBC driver can be obtained either by downloading it from the https://www.elastic.co/downloads/jdbc-client[elastic.co] site or by using a http://maven.apache.org/[Maven]-compatible tool with the following dependency: + +["source","xml",subs="attributes"] +---- + + org.elasticsearch.plugin.jdbc + jdbc + {ver} + +---- + +from `artifacts.elastic.co/maven` by adding it to the repositories list: + +["source","xml",subs="attributes"] +---- + + + elastic.co + https://artifacts.elastic.co/maven + + +---- + +[[jdbc-setup]] +[float] +=== Setup + +The driver main class is `org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver`. Note the driver +also implements the JDBC 4.0 +Service Provider+ mechanism meaning it is registerd automatically +as long as its available in the classpath. + +Once registered, the driver expects the following syntax as an URL: + +["source","text",subs="attributes"] +---- +jdbc:es://<1>[http|https]?<2>[host[:port]]*<3>/[prefix]*<4>[?[option=value]&<5>]* +---- + +<1> `jdbc:es://` prefix. Mandatory. +<2> type of HTTP connection to make - `http` (default) or `https`. Optional. +<3> host (`localhost` by default) and port (`9200` by default). Optional. +<4> prefix (empty by default). Typically used when hosting {es} under a certain path. Optional. +<5> Parameters for the JDBC driver. Empty by default. Optional. + +The driver recognized the following parameters: + +[[jdbc-cfg]] +[float] +===== Essential + +`timezone` (default JVM timezone):: +Timezone used by the driver _per connection_ indicated by its `ID`. +*Highly* recommended to set it (to, say, `UTC`) as the JVM timezone can vary, is global for the entire JVM and can't be changed easily when running under a security manager. + +[[jdbc-cfg-network]] +[float] +===== Network + +`connect.timeout` (default 30s):: +Connection timeout (in seconds). That is the maximum amount of time waiting to make a connection to the server. + +`network.timeout` (default 60s):: +Network timeout (in seconds). That is the maximum amount of time waiting for the network. + +`page.timeout` (default 45s):: +Page timeout (in seconds). That is the maximum amount of time waiting for a page. + +`page.size` (default 1000):: +Page size (in entries). The number of results returned per page by the server. + +`query.timeout` (default 90s):: +Query timeout (in seconds). That is the maximum amount of time waiting for a query to return. + +[[jdbc-cfg-auth]] +[float] +==== Basic Authentication + +`user`:: Basic Authentication user name + +`password`:: Basic Authentication password + +[[jdbc-cfg-ssl]] +[float] +==== SSL + +`ssl` (default false):: Enable SSL + +`ssl.keystore.location`:: key store (if used) location + +`ssl.keystore.pass`:: key store password + +`ssl.keystore.type` (default `JKS`):: key store type. 
`PKCS12` is a common, alternative format + +`ssl.truststore.location`:: trust store location + +`ssl.truststore.pass`:: trust store password + +`ssl.cert.allow.self.signed` (default `false`):: Whether or not to allow self signed certificates + +`ssl.protocol`(default `TLS`):: SSL protocol to be used + +[float] +==== Proxy + +`proxy.http`:: Http proxy host name + +`proxy.socks`:: SOCKS proxy host name + + +To put all of it together, the following URL: + +["source","text",subs="attributes"] +---- +jdbc:es://http://server:3456/timezone=UTC&page.size=250 +---- + +Opens up a {es-jdbc} connection to `server` on port `3456`, setting the JDBC timezone to `UTC` and its pagesize to `250` entries. + +=== API usage + +One can use JDBC through the official `java.sql` and `javax.sql` packages: + +==== `java.sql` +The former through `java.sql.Driver` and `DriverManager`: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -20,7 +143,9 @@ HTTP traffic. The port is by default 9200. <2> Properties for connecting to Elasticsearch. An empty `Properties` instance is fine for unsecured Elasticsearch. -or `javax.sql.DataSource` through +==== `javax.sql` + +Accessible through the `javax.sql.DataSource` API: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{jdbc-tests}/JdbcIntegrationTestCase.java[connect-ds] From bbcfcd1ca5a6b78757a7f5578da0cc618839c27f Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Sun, 17 Jun 2018 00:40:01 +0300 Subject: [PATCH 06/92] [DOCS] Fix version in SQL JDBC Maven template --- x-pack/docs/en/sql/endpoints/jdbc.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc index 067a4c586fb8e..a980278810e57 100644 --- a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc +++ b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc @@ -16,7 +16,7 @@ The JDBC driver can be obtained either by downloading it from the https://www.el org.elasticsearch.plugin.jdbc jdbc - {ver} + {version} ---- From 5b94afd3099cd60c7342ad7ba4e5764420d531eb Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 17 Jun 2018 12:17:28 +0300 Subject: [PATCH 07/92] [TEST] Double write alias fault (#30942) --- .../cluster/metadata/ToAndFromJsonMetaDataTests.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java index bde478eb36381..3ac55ec663ca0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java @@ -132,7 +132,7 @@ public void testSimpleJsonFromAndTo() throws IOException { .putMapping("mapping1", MAPPING_SOURCE1) .putMapping("mapping2", MAPPING_SOURCE2) .putAlias(newAliasMetaDataBuilder("alias1").filter(ALIAS_FILTER1)) - .putAlias(newAliasMetaDataBuilder("alias2").writeIndex(randomBoolean() ? null : randomBoolean())) + .putAlias(newAliasMetaDataBuilder("alias3").writeIndex(randomBoolean() ? 
null : randomBoolean())) .putAlias(newAliasMetaDataBuilder("alias4").filter(ALIAS_FILTER2))) .put(IndexTemplateMetaData.builder("foo") .patterns(Collections.singletonList("bar")) @@ -287,10 +287,10 @@ public void testSimpleJsonFromAndTo() throws IOException { assertThat(indexMetaData.getAliases().size(), equalTo(3)); assertThat(indexMetaData.getAliases().get("alias1").alias(), equalTo("alias1")); assertThat(indexMetaData.getAliases().get("alias1").filter().string(), equalTo(ALIAS_FILTER1)); - assertThat(indexMetaData.getAliases().get("alias2").alias(), equalTo("alias2")); - assertThat(indexMetaData.getAliases().get("alias2").filter(), nullValue()); - assertThat(indexMetaData.getAliases().get("alias2").writeIndex(), - equalTo(metaData.index("test12").getAliases().get("alias2").writeIndex())); + assertThat(indexMetaData.getAliases().get("alias3").alias(), equalTo("alias3")); + assertThat(indexMetaData.getAliases().get("alias3").filter(), nullValue()); + assertThat(indexMetaData.getAliases().get("alias3").writeIndex(), + equalTo(metaData.index("test12").getAliases().get("alias3").writeIndex())); assertThat(indexMetaData.getAliases().get("alias4").alias(), equalTo("alias4")); assertThat(indexMetaData.getAliases().get("alias4").filter().string(), equalTo(ALIAS_FILTER2)); From 3d5f113ada63b9175631e6ad1ef1645dc8f5f275 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Sun, 17 Jun 2018 13:32:53 +0200 Subject: [PATCH 08/92] Ensure we don't use a remote profile if cluster name matches (#31331) If we are running into a race condition between a node being configured to be a remote node for cross cluster search etc. and that node joining the cluster we might connect to that node with a remote profile. If that node now joins the cluster it connected to it as a CCS remote node we use the wrong profile and can't use bulk connections etc. anymore. This change uses the remote profile only if we connect to a node that has a different cluster name than the local cluster. This is not a perfect fix for this situation but is the safe option while potentially only loose a small optimization of using less connections per node which is small anyways since we only connect to a small set of nodes. 
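The rule at the heart of this change fits in a few lines: the handshake with a seed node reveals the remote cluster's name, and only when it differs from the local cluster name is the dedicated remote profile kept; otherwise the connection falls back to the default profile so the node still has bulk/recovery/state channels available if it joins the local cluster. The sketch below is a hypothetical, simplified illustration of that rule using plain strings and an enum instead of Elasticsearch's ClusterName and ConnectionProfile; the real implementation is getRemoteProfile(ClusterName) in the diff that follows.

["source","java"]
----
// Hypothetical sketch of the profile selection rule described above -
// not the actual RemoteClusterConnection code.
final class RemoteProfileSketch {

    enum Profile { DEFAULT, REMOTE }

    static Profile profileFor(String localClusterName, String handshakedClusterName) {
        // Same cluster name: the seed node may be about to join the local cluster,
        // so stay on the default profile rather than the reduced remote one.
        if (localClusterName.equals(handshakedClusterName)) {
            return Profile.DEFAULT;
        }
        return Profile.REMOTE;
    }

    public static void main(String[] args) {
        System.out.println(profileFor("elasticsearch", "elasticsearch")); // DEFAULT
        System.out.println(profileFor("elasticsearch", "other-cluster"));  // REMOTE
    }
}
----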
Closes #29321 --- .../transport/RemoteClusterConnection.java | 33 ++++++- .../transport/TransportService.java | 18 +++- .../RemoteClusterConnectionTests.java | 96 +++++++++++++++++++ .../transport/MockTcpTransport.java | 17 +++- 4 files changed, 153 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 82b921bd233b0..e37f46c5517db 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -87,6 +87,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo private volatile boolean skipUnavailable; private final ConnectHandler connectHandler; private SetOnce remoteClusterName = new SetOnce<>(); + private final ClusterName localClusterName; /** * Creates a new {@link RemoteClusterConnection} @@ -100,6 +101,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo RemoteClusterConnection(Settings settings, String clusterAlias, List seedNodes, TransportService transportService, int maxNumRemoteConnections, Predicate nodePredicate) { super(settings); + this.localClusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); this.transportService = transportService; this.maxNumRemoteConnections = maxNumRemoteConnections; this.nodePredicate = nodePredicate; @@ -310,6 +312,21 @@ public boolean isClosed() { return connectHandler.isClosed(); } + private ConnectionProfile getRemoteProfile(ClusterName name) { + // we can only compare the cluster name to make a decision if we should use a remote profile + // we can't use a cluster UUID here since we could be connecting to that remote cluster before + // the remote node has joined its cluster and have a cluster UUID. The fact that we just lose a + // rather smallish optimization on the connection layer under certain situations where remote clusters + // have the same name as the local one is minor here. + // the alternative here is to complicate the remote infrastructure to also wait until we formed a cluster, + // gained a cluster UUID and then start connecting etc. we rather use this simplification in order to maintain simplicity + if (this.localClusterName.equals(name)) { + return null; + } else { + return remoteProfile; + } + } + /** * The connect handler manages node discovery and the actual connect to the remote cluster. * There is at most one connect job running at any time. If such a connect job is triggered @@ -419,7 +436,6 @@ protected void doRun() { collectRemoteNodes(seedNodes.iterator(), transportService, listener); } }); - } void collectRemoteNodes(Iterator seedNodes, @@ -431,21 +447,27 @@ void collectRemoteNodes(Iterator seedNodes, if (seedNodes.hasNext()) { cancellableThreads.executeIO(() -> { final DiscoveryNode seedNode = seedNodes.next(); - final DiscoveryNode handshakeNode; + final TransportService.HandshakeResponse handshakeResponse; Transport.Connection connection = transportService.openConnection(seedNode, ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, null, null)); boolean success = false; try { try { - handshakeNode = transportService.handshake(connection, remoteProfile.getHandshakeTimeout().millis(), + handshakeResponse = transportService.handshake(connection, remoteProfile.getHandshakeTimeout().millis(), (c) -> remoteClusterName.get() == null ? 
true : c.equals(remoteClusterName.get())); } catch (IllegalStateException ex) { logger.warn(() -> new ParameterizedMessage("seed node {} cluster name mismatch expected " + "cluster name {}", connection.getNode(), remoteClusterName.get()), ex); throw ex; } + + final DiscoveryNode handshakeNode = handshakeResponse.getDiscoveryNode(); if (nodePredicate.test(handshakeNode) && connectedNodes.size() < maxNumRemoteConnections) { - transportService.connectToNode(handshakeNode, remoteProfile); + transportService.connectToNode(handshakeNode, getRemoteProfile(handshakeResponse.getClusterName())); + if (remoteClusterName.get() == null) { + assert handshakeResponse.getClusterName().value() != null; + remoteClusterName.set(handshakeResponse.getClusterName()); + } connectedNodes.add(handshakeNode); } ClusterStateRequest request = new ClusterStateRequest(); @@ -552,7 +574,8 @@ public void handleResponse(ClusterStateResponse response) { for (DiscoveryNode node : nodesIter) { if (nodePredicate.test(node) && connectedNodes.size() < maxNumRemoteConnections) { try { - transportService.connectToNode(node, remoteProfile); // noop if node is connected + transportService.connectToNode(node, getRemoteProfile(remoteClusterName.get())); // noop if node is + // connected connectedNodes.add(node); } catch (ConnectTransportException | IllegalStateException ex) { // ISE if we fail the handshake with an version incompatible node diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 9755898be5fef..656d8c3841769 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -342,7 +342,7 @@ public void connectToNode(final DiscoveryNode node, ConnectionProfile connection } transport.connectToNode(node, connectionProfile, (newConnection, actualProfile) -> { // We don't validate cluster names to allow for CCS connections. - final DiscoveryNode remote = handshake(newConnection, actualProfile.getHandshakeTimeout().millis(), cn -> true); + final DiscoveryNode remote = handshake(newConnection, actualProfile.getHandshakeTimeout().millis(), cn -> true).discoveryNode; if (validateConnections && node.equals(remote) == false) { throw new ConnectTransportException(node, "handshake failed. 
unexpected remote node " + remote); } @@ -378,7 +378,7 @@ public Transport.Connection openConnection(final DiscoveryNode node, ConnectionP public DiscoveryNode handshake( final Transport.Connection connection, final long handshakeTimeout) throws ConnectTransportException { - return handshake(connection, handshakeTimeout, clusterName::equals); + return handshake(connection, handshakeTimeout, clusterName::equals).discoveryNode; } /** @@ -390,11 +390,11 @@ public DiscoveryNode handshake( * @param connection the connection to a specific node * @param handshakeTimeout handshake timeout * @param clusterNamePredicate cluster name validation predicate - * @return the connected node + * @return the handshake response * @throws ConnectTransportException if the connection failed * @throws IllegalStateException if the handshake failed */ - public DiscoveryNode handshake( + public HandshakeResponse handshake( final Transport.Connection connection, final long handshakeTimeout, Predicate clusterNamePredicate) throws ConnectTransportException { final HandshakeResponse response; @@ -420,7 +420,7 @@ public HandshakeResponse newInstance() { throw new IllegalStateException("handshake failed, incompatible version [" + response.version + "] - " + node); } - return response.discoveryNode; + return response; } static class HandshakeRequest extends TransportRequest { @@ -461,6 +461,14 @@ public void writeTo(StreamOutput out) throws IOException { clusterName.writeTo(out); Version.writeVersion(version, out); } + + public DiscoveryNode getDiscoveryNode() { + return discoveryNode; + } + + public ClusterName getClusterName() { + return clusterName; + } } public void disconnectFromNode(DiscoveryNode node) { diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index ac6f99351e46d..637b8fb26a880 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -142,6 +142,102 @@ public static MockTransportService startTransport( } } + public void testLocalProfileIsUsedForLocalCluster() throws Exception { + List knownNodes = new CopyOnWriteArrayList<>(); + try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); + MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) { + DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); + DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode(); + knownNodes.add(seedTransport.getLocalDiscoNode()); + knownNodes.add(discoverableTransport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { + service.start(); + service.acceptIncomingRequests(); + try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", + Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { + updateSeedNodes(connection, Arrays.asList(seedNode)); + assertTrue(service.nodeConnected(seedNode)); + assertTrue(service.nodeConnected(discoverableNode)); + assertTrue(connection.assertNoRunningConnections()); + PlainTransportFuture futureHandler = new PlainTransportFuture<>( + new FutureTransportResponseHandler() { + @Override + public ClusterSearchShardsResponse 
read(StreamInput in) throws IOException { + ClusterSearchShardsResponse inst = new ClusterSearchShardsResponse(); + inst.readFrom(in); + return inst; + } + }); + TransportRequestOptions options = TransportRequestOptions.builder().withType(TransportRequestOptions.Type.BULK) + .build(); + service.sendRequest(connection.getConnection(), ClusterSearchShardsAction.NAME, new ClusterSearchShardsRequest(), + options, futureHandler); + futureHandler.txGet(); + } + } + } + } + + public void testRemoteProfileIsUsedForRemoteCluster() throws Exception { + List knownNodes = new CopyOnWriteArrayList<>(); + try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT, threadPool, + Settings.builder().put("cluster.name", "foobar").build()); + MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT, + threadPool, Settings.builder().put("cluster.name", "foobar").build())) { + DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); + DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode(); + knownNodes.add(seedTransport.getLocalDiscoNode()); + knownNodes.add(discoverableTransport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { + service.start(); + service.acceptIncomingRequests(); + try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", + Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { + updateSeedNodes(connection, Arrays.asList(seedNode)); + assertTrue(service.nodeConnected(seedNode)); + assertTrue(service.nodeConnected(discoverableNode)); + assertTrue(connection.assertNoRunningConnections()); + PlainTransportFuture futureHandler = new PlainTransportFuture<>( + new FutureTransportResponseHandler() { + @Override + public ClusterSearchShardsResponse read(StreamInput in) throws IOException { + ClusterSearchShardsResponse inst = new ClusterSearchShardsResponse(); + inst.readFrom(in); + return inst; + } + }); + TransportRequestOptions options = TransportRequestOptions.builder().withType(TransportRequestOptions.Type.BULK) + .build(); + IllegalStateException ise = (IllegalStateException) expectThrows(SendRequestTransportException.class, () -> { + service.sendRequest(discoverableNode, + ClusterSearchShardsAction.NAME, new ClusterSearchShardsRequest(), options, futureHandler); + futureHandler.txGet(); + }).getCause(); + assertEquals(ise.getMessage(), "can't select channel size is 0 for types: [RECOVERY, BULK, STATE]"); + + PlainTransportFuture handler = new PlainTransportFuture<>( + new FutureTransportResponseHandler() { + @Override + public ClusterSearchShardsResponse read(StreamInput in) throws IOException { + ClusterSearchShardsResponse inst = new ClusterSearchShardsResponse(); + inst.readFrom(in); + return inst; + } + }); + TransportRequestOptions ops = TransportRequestOptions.builder().withType(TransportRequestOptions.Type.REG) + .build(); + service.sendRequest(connection.getConnection(), ClusterSearchShardsAction.NAME, new ClusterSearchShardsRequest(), + ops, handler); + handler.txGet(); + } + } + } + } + public void testDiscoverSingleNode() throws Exception { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); diff --git 
a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 37bf95d0b153a..8831c46c01136 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -191,7 +191,22 @@ protected MockChannel initiateChannel(InetSocketAddress address, ActionListener< @Override protected ConnectionProfile resolveConnectionProfile(ConnectionProfile connectionProfile) { ConnectionProfile connectionProfile1 = resolveConnectionProfile(connectionProfile, defaultConnectionProfile); - ConnectionProfile.Builder builder = new ConnectionProfile.Builder(LIGHT_PROFILE); + ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); + Set allTypesWithConnection = new HashSet<>(); + Set allTypesWithoutConnection = new HashSet<>(); + for (ConnectionProfile.ConnectionTypeHandle handle : connectionProfile1.getHandles()) { + Set types = handle.getTypes(); + if (handle.length > 0) { + allTypesWithConnection.addAll(types); + } else { + allTypesWithoutConnection.addAll(types); + } + } + // make sure we maintain at least the types that are supported by this profile even if we only use a single channel for them. + builder.addConnections(1, allTypesWithConnection.toArray(new TransportRequestOptions.Type[0])); + if (allTypesWithoutConnection.isEmpty() == false) { + builder.addConnections(0, allTypesWithoutConnection.toArray(new TransportRequestOptions.Type[0])); + } builder.setHandshakeTimeout(connectionProfile1.getHandshakeTimeout()); builder.setConnectTimeout(connectionProfile1.getConnectTimeout()); return builder.build(); From 16fa6b270f5ebe9e78e0d283a88f4f7ced71c35a Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Sun, 17 Jun 2018 21:42:42 -0700 Subject: [PATCH 09/92] Remove some cases in FieldTypeLookupTests that are no longer relevant. 
(#31381) --- .../index/mapper/FieldTypeLookupTests.java | 25 ------------------- 1 file changed, 25 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index 39753548ee390..4f1b908cae84e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -32,8 +32,6 @@ import java.util.Iterator; import java.util.List; -import static org.hamcrest.Matchers.containsString; - public class FieldTypeLookupTests extends ESTestCase { public void testEmpty() { @@ -80,29 +78,6 @@ public void testAddExistingField() { assertEquals(f2.fieldType(), lookup2.get("foo")); } - public void testAddExistingIndexName() { - MockFieldMapper f = new MockFieldMapper("foo"); - MockFieldMapper f2 = new MockFieldMapper("bar"); - FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type1", newList(f)); - FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2)); - - assertSame(f.fieldType(), lookup2.get("foo")); - assertSame(f2.fieldType(), lookup2.get("bar")); - assertEquals(2, size(lookup2.iterator())); - } - - public void testAddExistingFullName() { - MockFieldMapper f = new MockFieldMapper("foo"); - MockFieldMapper f2 = new MockFieldMapper("foo"); - FieldTypeLookup lookup = new FieldTypeLookup(); - try { - lookup.copyAndAddAll("type2", newList(f2)); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [foo] has different [index_name]")); - } - } - public void testCheckCompatibilityMismatchedTypes() { FieldMapper f1 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); From 3378240b2954fcf879ac84f440002685c4b30cc9 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 18 Jun 2018 09:42:11 +0300 Subject: [PATCH 10/92] Reload secure settings for plugins (#31383) Adds the ability to re-read and decrypt the local node keystore. Commonly, the contents of the keystore, backing the `SecureSettings`, are not retrievable except during node initialization. This changes that by adding a new API which broadcasts a password to every node. The password is used to decrypt the local keystore, which is then used to populate a `Settings` object that is passed to all plugins implementing the `ReloadablePlugin` interface. Each plugin is then responsible for doing whatever "reload" means in its case. When the `reload` handler returns, the keystore is closed and its contents are no longer retrievable. The password is never stored persistently on any node. Plugins modified in this commit are: `repository-azure`, `repository-s3`, `repository-gcs` and `discovery-ec2`.
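For readers unfamiliar with the new extension point, here is a minimal, illustrative sketch of a plugin implementing `ReloadablePlugin`. It is not part of this patch; the package name, the `myplugin.secret` setting and the `rotateCredentials` helper are hypothetical placeholders.

    package org.example.reload;                       // hypothetical package, for illustration only

    import org.elasticsearch.common.settings.SecureSetting;
    import org.elasticsearch.common.settings.SecureString;
    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.plugins.ReloadablePlugin;

    public class MyReloadablePlugin extends Plugin implements ReloadablePlugin {

        // hypothetical secure setting read from the node keystore
        static final Setting<SecureString> SECRET_SETTING =
                SecureSetting.secureString("myplugin.secret", null);

        @Override
        public void reload(Settings settings) {
            // `settings` is backed by the freshly decrypted keystore; the secure
            // value is only readable until this method returns
            try (SecureString secret = SECRET_SETTING.get(settings)) {
                rotateCredentials(secret.toString());  // hypothetical helper
            }
        }

        private void rotateCredentials(String secret) {
            // rebuild clients, refresh cached credentials, etc. -- whatever
            // "reload" means for this particular plugin
        }
    }

In a real plugin the secure setting would also have to be registered through the plugin's `getSettings()` override, as `Ec2DiscoveryPlugin` does below for the `discovery.ec2.*` client settings.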
--- .../discovery/ec2/AmazonEc2Reference.java | 61 +++ .../discovery/ec2/AwsEc2Service.java | 74 ++- .../discovery/ec2/AwsEc2ServiceImpl.java | 148 +++--- .../ec2/AwsEc2UnicastHostsProvider.java | 43 +- .../discovery/ec2/Ec2ClientSettings.java | 145 ++++++ .../discovery/ec2/Ec2DiscoveryPlugin.java | 69 +-- .../discovery/ec2/AmazonEC2Mock.java | 15 +- .../discovery/ec2/AwsEc2ServiceImplTests.java | 31 +- .../discovery/ec2/AwsEc2ServiceMock.java | 33 +- .../ec2/Ec2DiscoveryPluginMock.java} | 20 +- .../ec2/Ec2DiscoveryPluginTests.java | 93 +++- .../discovery/ec2/Ec2DiscoveryTests.java | 46 +- .../repositories/azure/AzureBlobStore.java | 58 +-- .../repositories/azure/AzureRepository.java | 52 +-- .../azure/AzureRepositoryPlugin.java | 27 +- .../azure/AzureStorageService.java | 47 +- .../azure/AzureStorageServiceImpl.java | 296 ++++++------ .../azure/AzureStorageSettings.java | 77 +++- .../azure/AzureRepositorySettingsTests.java | 3 +- .../azure/AzureSnapshotRestoreTests.java | 56 ++- .../azure/AzureStorageServiceMock.java | 42 +- .../azure/AzureStorageServiceTests.java | 191 +++++--- .../gcs/GoogleCloudStorageBlobStore.java | 104 ++--- .../gcs/GoogleCloudStoragePlugin.java | 32 +- .../gcs/GoogleCloudStorageRepository.java | 5 +- .../gcs/GoogleCloudStorageService.java | 77 +++- ...leCloudStorageBlobStoreContainerTests.java | 15 +- ...eCloudStorageBlobStoreRepositoryTests.java | 20 +- .../gcs/GoogleCloudStorageBlobStoreTests.java | 15 +- .../gcs/GoogleCloudStorageServiceTests.java | 92 +++- plugins/repository-s3/build.gradle | 2 +- .../repositories/s3/AmazonS3Reference.java | 63 +++ .../repositories/s3/AwsS3Service.java | 23 +- .../repositories/s3/InternalAwsS3Service.java | 148 +++--- .../repositories/s3/S3BlobContainer.java | 137 +++--- .../repositories/s3/S3BlobStore.java | 74 +-- .../repositories/s3/S3ClientSettings.java | 72 ++- .../repositories/s3/S3Repository.java | 46 +- .../repositories/s3/S3RepositoryPlugin.java | 45 +- .../plugin-metadata/plugin-security.policy | 3 + .../s3/AbstractS3SnapshotRestoreTest.java | 26 +- .../repositories/s3/AmazonS3Wrapper.java | 5 + .../s3/AwsS3ServiceImplTests.java | 134 +++--- .../repositories/s3/MockAmazonS3.java | 5 + .../s3/RepositoryCredentialsTests.java | 211 +++++++++ .../RepositorySettingsCredentialsTests.java | 41 -- .../s3/S3BlobStoreContainerTests.java | 31 +- .../s3/S3BlobStoreRepositoryTests.java | 26 +- .../repositories/s3/S3BlobStoreTests.java | 12 +- .../repositories/s3/S3RepositoryTests.java | 89 ++-- .../repositories/s3/TestAmazonS3.java | 38 +- .../repositories/s3/TestAwsS3Service.java | 28 +- .../elasticsearch/action/ActionModule.java | 6 + .../NodesReloadSecureSettingsAction.java | 21 +- .../NodesReloadSecureSettingsRequest.java | 160 +++++++ ...desReloadSecureSettingsRequestBuilder.java | 84 ++++ .../NodesReloadSecureSettingsResponse.java | 149 +++++++ ...nsportNodesReloadSecureSettingsAction.java | 144 ++++++ .../client/ClusterAdminClient.java | 6 + .../client/support/AbstractClient.java | 7 + .../common/settings/KeyStoreWrapper.java | 4 +- .../common/util/LazyInitializable.java | 108 +++++ .../org/elasticsearch/plugins/Plugin.java | 1 + .../plugins/ReloadablePlugin.java | 54 +++ .../RestReloadSecureSettingsAction.java | 87 ++++ .../action/admin/ReloadSecureSettingsIT.java | 422 ++++++++++++++++++ .../action/admin/invalid.txt.keystore | 3 + 67 files changed, 3323 insertions(+), 1179 deletions(-) create mode 100644 plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AmazonEc2Reference.java create mode 100644 
plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java rename plugins/{repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceDisableException.java => discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java} (62%) create mode 100644 plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AmazonS3Reference.java create mode 100644 plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java delete mode 100644 plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositorySettingsCredentialsTests.java rename plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceRemoteException.java => server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java (56%) create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java create mode 100644 server/src/main/java/org/elasticsearch/common/util/LazyInitializable.java create mode 100644 server/src/main/java/org/elasticsearch/plugins/ReloadablePlugin.java create mode 100644 server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java create mode 100644 server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java create mode 100644 server/src/test/resources/org/elasticsearch/action/admin/invalid.txt.keystore diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AmazonEc2Reference.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AmazonEc2Reference.java new file mode 100644 index 0000000000000..0b0b208790b48 --- /dev/null +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AmazonEc2Reference.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.ec2; + +import com.amazonaws.services.ec2.AmazonEC2; + +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.util.concurrent.AbstractRefCounted; + +/** + * Handles the shutdown of the wrapped {@link AmazonEC2} using reference + * counting. 
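As a usage note (illustrative only, not part of the patch): callers acquire the reference, use the wrapped client, and then release the reference, typically with try-with-resources, so the client can never be shut down while still in use. A minimal sketch, assuming an injected `AwsEc2Service` and the usual `com.amazonaws.services.ec2.model` imports:

    // Acquire, use, release: close() only decrements the reference count; the
    // wrapped AmazonEC2 client is shut down when the last holder releases it.
    static DescribeInstancesResult describeAllInstances(AwsEc2Service awsEc2Service) {
        try (AmazonEc2Reference reference = awsEc2Service.client()) {
            return reference.client().describeInstances(new DescribeInstancesRequest());
        }
    }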
+ */ +public class AmazonEc2Reference extends AbstractRefCounted implements Releasable { + + private final AmazonEC2 client; + + AmazonEc2Reference(AmazonEC2 client) { + super("AWS_EC2_CLIENT"); + this.client = client; + } + + /** + * Call when the client is not needed anymore. + */ + @Override + public void close() { + decRef(); + } + + /** + * Returns the underlying `AmazonEC2` client. All method calls are permitted BUT + * NOT shutdown. Shutdown is called when reference count reaches 0. + */ + public AmazonEC2 client() { + return client; + } + + @Override + protected void closeInternal() { + client.shutdown(); + } + +} diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java index db3164fe9007a..976f1db26d173 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java @@ -19,22 +19,17 @@ package org.elasticsearch.discovery.ec2; -import com.amazonaws.ClientConfiguration; -import com.amazonaws.Protocol; -import com.amazonaws.services.ec2.AmazonEC2; -import org.elasticsearch.common.settings.SecureSetting; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; +import java.io.Closeable; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Locale; import java.util.function.Function; -interface AwsEc2Service { +interface AwsEc2Service extends Closeable { Setting AUTO_ATTRIBUTE_SETTING = Setting.boolSetting("cloud.node.auto_attributes", false, Property.NodeScope); class HostType { @@ -45,36 +40,6 @@ class HostType { public static final String TAG_PREFIX = "tag:"; } - /** The access key (ie login id) for connecting to ec2. */ - Setting ACCESS_KEY_SETTING = SecureSetting.secureString("discovery.ec2.access_key", null); - - /** The secret key (ie password) for connecting to ec2. */ - Setting SECRET_KEY_SETTING = SecureSetting.secureString("discovery.ec2.secret_key", null); - - /** An override for the ec2 endpoint to connect to. */ - Setting ENDPOINT_SETTING = new Setting<>("discovery.ec2.endpoint", "", - s -> s.toLowerCase(Locale.ROOT), Property.NodeScope); - - /** The protocol to use to connect to to ec2. */ - Setting PROTOCOL_SETTING = new Setting<>("discovery.ec2.protocol", "https", - s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope); - - /** The host name of a proxy to connect to ec2 through. */ - Setting PROXY_HOST_SETTING = Setting.simpleString("discovery.ec2.proxy.host", Property.NodeScope); - - /** The port of a proxy to connect to ec2 through. */ - Setting PROXY_PORT_SETTING = Setting.intSetting("discovery.ec2.proxy.port", 80, 0, 1<<16, Property.NodeScope); - - /** The username of a proxy to connect to s3 through. */ - Setting PROXY_USERNAME_SETTING = SecureSetting.secureString("discovery.ec2.proxy.username", null); - - /** The password of a proxy to connect to s3 through. */ - Setting PROXY_PASSWORD_SETTING = SecureSetting.secureString("discovery.ec2.proxy.password", null); - - /** The socket timeout for connecting to s3. 
*/ - Setting READ_TIMEOUT_SETTING = Setting.timeSetting("discovery.ec2.read_timeout", - TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT), Property.NodeScope); - /** * discovery.ec2.host_type: The type of host type to use to communicate with other instances. * Can be one of private_ip, public_ip, private_dns, public_dns or tag:XXXX where @@ -87,26 +52,24 @@ class HostType { * discovery.ec2.any_group: If set to false, will require all security groups to be present for the instance to be used for the * discovery. Defaults to true. */ - Setting ANY_GROUP_SETTING = - Setting.boolSetting("discovery.ec2.any_group", true, Property.NodeScope); + Setting ANY_GROUP_SETTING = Setting.boolSetting("discovery.ec2.any_group", true, Property.NodeScope); /** * discovery.ec2.groups: Either a comma separated list or array based list of (security) groups. Only instances with the provided * security groups will be used in the cluster discovery. (NOTE: You could provide either group NAME or group ID.) */ - Setting> GROUPS_SETTING = - Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), Property.NodeScope); + Setting> GROUPS_SETTING = Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), + Property.NodeScope); /** * discovery.ec2.availability_zones: Either a comma separated list or array based list of availability zones. Only instances within * the provided availability zones will be used in the cluster discovery. */ - Setting> AVAILABILITY_ZONES_SETTING = - Setting.listSetting("discovery.ec2.availability_zones", Collections.emptyList(), s -> s.toString(), - Property.NodeScope); + Setting> AVAILABILITY_ZONES_SETTING = Setting.listSetting("discovery.ec2.availability_zones", Collections.emptyList(), + s -> s.toString(), Property.NodeScope); /** * discovery.ec2.node_cache_time: How long the list of hosts is cached to prevent further requests to the AWS API. Defaults to 10s. */ - Setting NODE_CACHE_TIME_SETTING = - Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), Property.NodeScope); + Setting NODE_CACHE_TIME_SETTING = Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), + Property.NodeScope); /** * discovery.ec2.tag.*: The ec2 discovery can filter machines to include in the cluster based on tags (and not just groups). @@ -115,7 +78,22 @@ class HostType { * instance to be included. */ Setting.AffixSetting> TAG_SETTING = Setting.prefixKeySetting("discovery.ec2.tag.", - key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope)); + key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope)); + + /** + * Builds then caches an {@code AmazonEC2} client using the current client + * settings. Returns an {@code AmazonEc2Reference} wrapper which should be + * released as soon as it is not required anymore. + */ + AmazonEc2Reference client(); + + /** + * Updates the settings for building the client and releases the cached one. + * Future client requests will use the new settings to lazily built the new + * client. 
+ * + * @param clientSettings the new refreshed settings + */ + void refreshAndClearCache(Ec2ClientSettings clientSettings); - AmazonEC2 client(); } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java index b53dc7a876301..67902174630ea 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java @@ -19,12 +19,9 @@ package org.elasticsearch.discovery.ec2; -import java.io.Closeable; -import java.io.IOException; import java.util.Random; +import java.util.concurrent.atomic.AtomicReference; -import com.amazonaws.AmazonClientException; -import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; @@ -35,112 +32,117 @@ import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.AmazonEC2Client; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.LazyInitializable; -class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Service, Closeable { +class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Service { public static final String EC2_METADATA_URL = "http://169.254.169.254/latest/meta-data/"; - private AmazonEC2Client client; + private final AtomicReference> lazyClientReference = + new AtomicReference<>(); AwsEc2ServiceImpl(Settings settings) { super(settings); } - @Override - public synchronized AmazonEC2 client() { - if (client != null) { - return client; - } - - this.client = new AmazonEC2Client(buildCredentials(logger, settings), buildConfiguration(logger, settings)); - String endpoint = findEndpoint(logger, settings); - if (endpoint != null) { - client.setEndpoint(endpoint); + private AmazonEC2 buildClient(Ec2ClientSettings clientSettings) { + final AWSCredentialsProvider credentials = buildCredentials(logger, clientSettings); + final ClientConfiguration configuration = buildConfiguration(logger, clientSettings); + final AmazonEC2 client = buildClient(credentials, configuration); + if (Strings.hasText(clientSettings.endpoint)) { + logger.debug("using explicit ec2 endpoint [{}]", clientSettings.endpoint); + client.setEndpoint(clientSettings.endpoint); } - - return this.client; + return client; } - protected static AWSCredentialsProvider buildCredentials(Logger logger, Settings settings) { - AWSCredentialsProvider credentials; - - try (SecureString key = ACCESS_KEY_SETTING.get(settings); - SecureString secret = SECRET_KEY_SETTING.get(settings)) { - if (key.length() == 0 && secret.length() == 0) { - logger.debug("Using either environment variables, system properties or instance profile credentials"); - credentials = new DefaultAWSCredentialsProviderChain(); - } else { - logger.debug("Using basic key/secret credentials"); - credentials = new StaticCredentialsProvider(new BasicAWSCredentials(key.toString(), secret.toString())); - } - } - - return credentials; + // proxy for testing + AmazonEC2 buildClient(AWSCredentialsProvider 
credentials, ClientConfiguration configuration) { + final AmazonEC2 client = new AmazonEC2Client(credentials, configuration); + return client; } - protected static ClientConfiguration buildConfiguration(Logger logger, Settings settings) { - ClientConfiguration clientConfiguration = new ClientConfiguration(); + // pkg private for tests + static ClientConfiguration buildConfiguration(Logger logger, Ec2ClientSettings clientSettings) { + final ClientConfiguration clientConfiguration = new ClientConfiguration(); // the response metadata cache is only there for diagnostics purposes, // but can force objects from every response to the old generation. clientConfiguration.setResponseMetadataCacheSize(0); - clientConfiguration.setProtocol(PROTOCOL_SETTING.get(settings)); - - if (PROXY_HOST_SETTING.exists(settings)) { - String proxyHost = PROXY_HOST_SETTING.get(settings); - Integer proxyPort = PROXY_PORT_SETTING.get(settings); - try (SecureString proxyUsername = PROXY_USERNAME_SETTING.get(settings); - SecureString proxyPassword = PROXY_PASSWORD_SETTING.get(settings)) { - - clientConfiguration - .withProxyHost(proxyHost) - .withProxyPort(proxyPort) - .withProxyUsername(proxyUsername.toString()) - .withProxyPassword(proxyPassword.toString()); - } + clientConfiguration.setProtocol(clientSettings.protocol); + if (Strings.hasText(clientSettings.proxyHost)) { + // TODO: remove this leniency, these settings should exist together and be validated + clientConfiguration.setProxyHost(clientSettings.proxyHost); + clientConfiguration.setProxyPort(clientSettings.proxyPort); + clientConfiguration.setProxyUsername(clientSettings.proxyUsername); + clientConfiguration.setProxyPassword(clientSettings.proxyPassword); } - // Increase the number of retries in case of 5xx API responses final Random rand = Randomness.get(); - RetryPolicy retryPolicy = new RetryPolicy( + final RetryPolicy retryPolicy = new RetryPolicy( RetryPolicy.RetryCondition.NO_RETRY_CONDITION, - new RetryPolicy.BackoffStrategy() { - @Override - public long delayBeforeNextRetry(AmazonWebServiceRequest originalRequest, - AmazonClientException exception, - int retriesAttempted) { - // with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000) - logger.warn("EC2 API request failed, retry again. Reason was:", exception); - return 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble())); - } + (originalRequest, exception, retriesAttempted) -> { + // with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000) + logger.warn("EC2 API request failed, retry again. 
Reason was:", exception); + return 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble())); }, 10, false); clientConfiguration.setRetryPolicy(retryPolicy); - clientConfiguration.setSocketTimeout((int) READ_TIMEOUT_SETTING.get(settings).millis()); - + clientConfiguration.setSocketTimeout(clientSettings.readTimeoutMillis); return clientConfiguration; } - protected static String findEndpoint(Logger logger, Settings settings) { - String endpoint = null; - if (ENDPOINT_SETTING.exists(settings)) { - endpoint = ENDPOINT_SETTING.get(settings); - logger.debug("using explicit ec2 endpoint [{}]", endpoint); + // pkg private for tests + static AWSCredentialsProvider buildCredentials(Logger logger, Ec2ClientSettings clientSettings) { + final BasicAWSCredentials credentials = clientSettings.credentials; + if (credentials == null) { + logger.debug("Using either environment variables, system properties or instance profile credentials"); + return new DefaultAWSCredentialsProviderChain(); + } else { + logger.debug("Using basic key/secret credentials"); + return new StaticCredentialsProvider(credentials); } - return endpoint; } @Override - public void close() throws IOException { - if (client != null) { - client.shutdown(); + public AmazonEc2Reference client() { + final LazyInitializable clientReference = this.lazyClientReference.get(); + if (clientReference == null) { + throw new IllegalStateException("Missing ec2 client configs"); } + return clientReference.getOrCompute(); + } - // Ensure that IdleConnectionReaper is shutdown + /** + * Refreshes the settings for the AmazonEC2 client. The new client will be build + * using these new settings. The old client is usable until released. On release it + * will be destroyed instead of being returned to the cache. 
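To make the refresh semantics concrete, here is an illustrative sketch (not part of the patch) of the observable behaviour around a refresh; `newSettings` stands for a freshly parsed `Ec2ClientSettings`, and the describe call assumes valid credentials:

    // A reference obtained before the refresh stays usable; the pre-refresh
    // client is shut down only after every holder has released it.
    static void refreshExample(AwsEc2Service ec2Service, Ec2ClientSettings newSettings) {
        final AmazonEc2Reference before = ec2Service.client();  // lazily builds and caches a client
        ec2Service.refreshAndClearCache(newSettings);           // swaps the cached client
        final AmazonEc2Reference after = ec2Service.client();   // built from the new settings
        before.client().describeInstances(new DescribeInstancesRequest()); // still valid
        before.close();  // last release of the old reference triggers shutdown of the old client
        after.close();
    }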
+ */ + @Override + public void refreshAndClearCache(Ec2ClientSettings clientSettings) { + final LazyInitializable newClient = new LazyInitializable<>( + () -> new AmazonEc2Reference(buildClient(clientSettings)), clientReference -> clientReference.incRef(), + clientReference -> clientReference.decRef()); + final LazyInitializable oldClient = this.lazyClientReference.getAndSet(newClient); + if (oldClient != null) { + oldClient.reset(); + } + } + + @Override + public void close() { + final LazyInitializable clientReference = this.lazyClientReference.getAndSet(null); + if (clientReference != null) { + clientReference.reset(); + } + // shutdown IdleConnectionReaper background thread + // it will be restarted on new client usage IdleConnectionReaper.shutdown(); } + } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index f291413d408ed..2c536981b04c5 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -20,7 +20,6 @@ package org.elasticsearch.discovery.ec2; import com.amazonaws.AmazonClientException; -import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.model.DescribeInstancesRequest; import com.amazonaws.services.ec2.model.DescribeInstancesResult; import com.amazonaws.services.ec2.model.Filter; @@ -59,7 +58,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos private final TransportService transportService; - private final AmazonEC2 client; + private final AwsEc2Service awsEc2Service; private final boolean bindAnyGroup; @@ -76,7 +75,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AwsEc2Service awsEc2Service) { super(settings); this.transportService = transportService; - this.client = awsEc2Service.client(); + this.awsEc2Service = awsEc2Service; this.hostType = AwsEc2Service.HOST_TYPE_SETTING.get(settings); this.discoNodes = new DiscoNodesCache(AwsEc2Service.NODE_CACHE_TIME_SETTING.get(settings)); @@ -103,31 +102,31 @@ public List buildDynamicNodes() { protected List fetchDynamicNodes() { - List discoNodes = new ArrayList<>(); + final List discoNodes = new ArrayList<>(); - DescribeInstancesResult descInstances; - try { + final DescribeInstancesResult descInstances; + try (AmazonEc2Reference clientReference = awsEc2Service.client()) { // Query EC2 API based on AZ, instance state, and tag. // NOTE: we don't filter by security group during the describe instances request for two reasons: // 1. differences in VPCs require different parameters during query (ID vs Name) // 2. We want to use two different strategies: (all security groups vs. 
any security groups) - descInstances = SocketAccess.doPrivileged(() -> client.describeInstances(buildDescribeInstancesRequest())); - } catch (AmazonClientException e) { + descInstances = SocketAccess.doPrivileged(() -> clientReference.client().describeInstances(buildDescribeInstancesRequest())); + } catch (final AmazonClientException e) { logger.info("Exception while retrieving instance list from AWS API: {}", e.getMessage()); logger.debug("Full exception:", e); return discoNodes; } logger.trace("building dynamic unicast discovery nodes..."); - for (Reservation reservation : descInstances.getReservations()) { - for (Instance instance : reservation.getInstances()) { + for (final Reservation reservation : descInstances.getReservations()) { + for (final Instance instance : reservation.getInstances()) { // lets see if we can filter based on groups if (!groups.isEmpty()) { - List instanceSecurityGroups = instance.getSecurityGroups(); - List securityGroupNames = new ArrayList<>(instanceSecurityGroups.size()); - List securityGroupIds = new ArrayList<>(instanceSecurityGroups.size()); - for (GroupIdentifier sg : instanceSecurityGroups) { + final List instanceSecurityGroups = instance.getSecurityGroups(); + final List securityGroupNames = new ArrayList<>(instanceSecurityGroups.size()); + final List securityGroupIds = new ArrayList<>(instanceSecurityGroups.size()); + for (final GroupIdentifier sg : instanceSecurityGroups) { securityGroupNames.add(sg.getGroupName()); securityGroupIds.add(sg.getGroupId()); } @@ -162,10 +161,10 @@ && disjoint(securityGroupIds, groups)) { address = instance.getPublicIpAddress(); } else if (hostType.startsWith(TAG_PREFIX)) { // Reading the node host from its metadata - String tagName = hostType.substring(TAG_PREFIX.length()); + final String tagName = hostType.substring(TAG_PREFIX.length()); logger.debug("reading hostname from [{}] instance tag", tagName); - List tags = instance.getTags(); - for (Tag tag : tags) { + final List tags = instance.getTags(); + for (final Tag tag : tags) { if (tag.getKey().equals(tagName)) { address = tag.getValue(); logger.debug("using [{}] as the instance address", address); @@ -177,13 +176,13 @@ && disjoint(securityGroupIds, groups)) { if (address != null) { try { // we only limit to 1 port per address, makes no sense to ping 100 ports - TransportAddress[] addresses = transportService.addressesFromString(address, 1); + final TransportAddress[] addresses = transportService.addressesFromString(address, 1); for (int i = 0; i < addresses.length; i++) { logger.trace("adding {}, address {}, transport_address {}", instance.getInstanceId(), address, addresses[i]); discoNodes.add(new DiscoveryNode(instance.getInstanceId(), "#cloud-" + instance.getInstanceId() + "-" + i, addresses[i], emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion())); } - } catch (Exception e) { + } catch (final Exception e) { final String finalAddress = address; logger.warn( (Supplier) @@ -201,12 +200,12 @@ && disjoint(securityGroupIds, groups)) { } private DescribeInstancesRequest buildDescribeInstancesRequest() { - DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest() + final DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest() .withFilters( new Filter("instance-state-name").withValues("running", "pending") ); - for (Map.Entry> tagFilter : tags.entrySet()) { + for (final Map.Entry> tagFilter : tags.entrySet()) { // for a given tag key, OR relationship for multiple different values 
describeInstancesRequest.withFilters( new Filter("tag:" + tagFilter.getKey()).withValues(tagFilter.getValue()) @@ -238,7 +237,7 @@ protected boolean needsRefresh() { @Override protected List refresh() { - List nodes = fetchDynamicNodes(); + final List nodes = fetchDynamicNodes(); empty = nodes.isEmpty(); return nodes; } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java new file mode 100644 index 0000000000000..b42b0d546001a --- /dev/null +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java @@ -0,0 +1,145 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.ec2; + +import com.amazonaws.ClientConfiguration; +import com.amazonaws.Protocol; +import com.amazonaws.auth.BasicAWSCredentials; + +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.unit.TimeValue; +import java.util.Locale; + +/** + * A container for settings used to create an EC2 client. + */ +final class Ec2ClientSettings { + + /** The access key (ie login id) for connecting to ec2. */ + static final Setting ACCESS_KEY_SETTING = SecureSetting.secureString("discovery.ec2.access_key", null); + + /** The secret key (ie password) for connecting to ec2. */ + static final Setting SECRET_KEY_SETTING = SecureSetting.secureString("discovery.ec2.secret_key", null); + + /** The host name of a proxy to connect to ec2 through. */ + static final Setting PROXY_HOST_SETTING = Setting.simpleString("discovery.ec2.proxy.host", Property.NodeScope); + + /** The port of a proxy to connect to ec2 through. */ + static final Setting PROXY_PORT_SETTING = Setting.intSetting("discovery.ec2.proxy.port", 80, 0, 1 << 16, Property.NodeScope); + + /** An override for the ec2 endpoint to connect to. */ + static final Setting ENDPOINT_SETTING = new Setting<>("discovery.ec2.endpoint", "", s -> s.toLowerCase(Locale.ROOT), + Property.NodeScope); + + /** The protocol to use to connect to to ec2. */ + static final Setting PROTOCOL_SETTING = new Setting<>("discovery.ec2.protocol", "https", + s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope); + + /** The username of a proxy to connect to s3 through. */ + static final Setting PROXY_USERNAME_SETTING = SecureSetting.secureString("discovery.ec2.proxy.username", null); + + /** The password of a proxy to connect to s3 through. 
*/ + static final Setting PROXY_PASSWORD_SETTING = SecureSetting.secureString("discovery.ec2.proxy.password", null); + + /** The socket timeout for connecting to s3. */ + static final Setting READ_TIMEOUT_SETTING = Setting.timeSetting("discovery.ec2.read_timeout", + TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT), Property.NodeScope); + + /** Credentials to authenticate with ec2. */ + final BasicAWSCredentials credentials; + + /** + * The ec2 endpoint the client should talk to, or empty string to use the + * default. + */ + final String endpoint; + + /** The protocol to use to talk to ec2. Defaults to https. */ + final Protocol protocol; + + /** An optional proxy host that requests to ec2 should be made through. */ + final String proxyHost; + + /** The port number the proxy host should be connected on. */ + final int proxyPort; + + // these should be "secure" yet the api for the ec2 client only takes String, so + // storing them + // as SecureString here won't really help with anything + /** An optional username for the proxy host, for basic authentication. */ + final String proxyUsername; + + /** An optional password for the proxy host, for basic authentication. */ + final String proxyPassword; + + /** The read timeout for the ec2 client. */ + final int readTimeoutMillis; + + protected Ec2ClientSettings(BasicAWSCredentials credentials, String endpoint, Protocol protocol, String proxyHost, int proxyPort, + String proxyUsername, String proxyPassword, int readTimeoutMillis) { + this.credentials = credentials; + this.endpoint = endpoint; + this.protocol = protocol; + this.proxyHost = proxyHost; + this.proxyPort = proxyPort; + this.proxyUsername = proxyUsername; + this.proxyPassword = proxyPassword; + this.readTimeoutMillis = readTimeoutMillis; + } + + static BasicAWSCredentials loadCredentials(Settings settings) { + try (SecureString accessKey = ACCESS_KEY_SETTING.get(settings); + SecureString secretKey = SECRET_KEY_SETTING.get(settings);) { + if (accessKey.length() != 0) { + if (secretKey.length() != 0) { + return new BasicAWSCredentials(accessKey.toString(), secretKey.toString()); + } else { + throw new IllegalArgumentException("Missing secret key for ec2 client."); + } + } else if (secretKey.length() != 0) { + throw new IllegalArgumentException("Missing access key for ec2 client."); + } + return null; + } + } + + // pkg private for tests + /** Parse settings for a single client. 
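As an illustration of how these settings are consumed (a sketch in the style of the plugin's unit tests, not part of the patch; `MockSecureSettings` is the test-framework helper used elsewhere in this series, and the endpoint value is arbitrary):

    MockSecureSettings secureSettings = new MockSecureSettings();
    secureSettings.setString("discovery.ec2.access_key", "aws_key");
    secureSettings.setString("discovery.ec2.secret_key", "aws_secret");
    Settings nodeSettings = Settings.builder()
            .put("discovery.ec2.endpoint", "ec2.us-west-2.amazonaws.com")
            .setSecureSettings(secureSettings)
            .build();
    Ec2ClientSettings clientSettings = Ec2ClientSettings.getClientSettings(nodeSettings);
    // clientSettings.credentials now carries the key pair; supplying only one of
    // access_key / secret_key makes getClientSettings throw IllegalArgumentException.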
*/ + static Ec2ClientSettings getClientSettings(Settings settings) { + final BasicAWSCredentials credentials = loadCredentials(settings); + try (SecureString proxyUsername = PROXY_USERNAME_SETTING.get(settings); + SecureString proxyPassword = PROXY_PASSWORD_SETTING.get(settings)) { + return new Ec2ClientSettings( + credentials, + ENDPOINT_SETTING.get(settings), + PROTOCOL_SETTING.get(settings), + PROXY_HOST_SETTING.get(settings), + PROXY_PORT_SETTING.get(settings), + proxyUsername.toString(), + proxyPassword.toString(), + (int)READ_TIMEOUT_SETTING.get(settings).millis()); + } + } + +} diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java index 28d563e6a9ca6..9fc32ea306c0e 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java @@ -21,8 +21,6 @@ import com.amazonaws.util.json.Jackson; import org.apache.logging.log4j.Logger; -import org.elasticsearch.core.internal.io.IOUtils; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.logging.Loggers; @@ -33,10 +31,10 @@ import org.elasticsearch.node.Node; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.transport.TransportService; import java.io.BufferedReader; -import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -52,7 +50,7 @@ import java.util.Map; import java.util.function.Supplier; -public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Closeable { +public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, ReloadablePlugin { private static Logger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class); public static final String EC2 = "ec2"; @@ -68,22 +66,27 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close // ClientConfiguration clinit has some classloader problems // TODO: fix that Class.forName("com.amazonaws.ClientConfiguration"); - } catch (ClassNotFoundException e) { + } catch (final ClassNotFoundException e) { throw new RuntimeException(e); } return null; }); } - private Settings settings; - // stashed when created in order to properly close - private final SetOnce ec2Service = new SetOnce<>(); + private final Settings settings; + // protected for testing + protected final AwsEc2Service ec2Service; public Ec2DiscoveryPlugin(Settings settings) { - this.settings = settings; + this(settings, new AwsEc2ServiceImpl(settings)); } - + protected Ec2DiscoveryPlugin(Settings settings, AwsEc2ServiceImpl ec2Service) { + this.settings = settings; + this.ec2Service = ec2Service; + // eagerly load client settings when secure settings are accessible + reload(settings); + } @Override public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) { @@ -94,25 +97,22 @@ public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings @Override public Map> getZenHostsProviders(TransportService transportService, NetworkService networkService) { - return Collections.singletonMap(EC2, () -> { - ec2Service.set(new AwsEc2ServiceImpl(settings)); - return new 
AwsEc2UnicastHostsProvider(settings, transportService, ec2Service.get()); - }); + return Collections.singletonMap(EC2, () -> new AwsEc2UnicastHostsProvider(settings, transportService, ec2Service)); } @Override public List> getSettings() { return Arrays.asList( // Register EC2 discovery settings: discovery.ec2 - AwsEc2Service.ACCESS_KEY_SETTING, - AwsEc2Service.SECRET_KEY_SETTING, - AwsEc2Service.ENDPOINT_SETTING, - AwsEc2Service.PROTOCOL_SETTING, - AwsEc2Service.PROXY_HOST_SETTING, - AwsEc2Service.PROXY_PORT_SETTING, - AwsEc2Service.PROXY_USERNAME_SETTING, - AwsEc2Service.PROXY_PASSWORD_SETTING, - AwsEc2Service.READ_TIMEOUT_SETTING, + Ec2ClientSettings.ACCESS_KEY_SETTING, + Ec2ClientSettings.SECRET_KEY_SETTING, + Ec2ClientSettings.ENDPOINT_SETTING, + Ec2ClientSettings.PROTOCOL_SETTING, + Ec2ClientSettings.PROXY_HOST_SETTING, + Ec2ClientSettings.PROXY_PORT_SETTING, + Ec2ClientSettings.PROXY_USERNAME_SETTING, + Ec2ClientSettings.PROXY_PASSWORD_SETTING, + Ec2ClientSettings.READ_TIMEOUT_SETTING, AwsEc2Service.HOST_TYPE_SETTING, AwsEc2Service.ANY_GROUP_SETTING, AwsEc2Service.GROUPS_SETTING, @@ -125,10 +125,10 @@ public List> getSettings() { @Override public Settings additionalSettings() { - Settings.Builder builder = Settings.builder(); + final Settings.Builder builder = Settings.builder(); // Adds a node attribute for the ec2 availability zone - String azMetadataUrl = AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone"; + final String azMetadataUrl = AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone"; builder.put(getAvailabilityZoneNodeAttributes(settings, azMetadataUrl)); return builder.build(); } @@ -139,7 +139,7 @@ static Settings getAvailabilityZoneNodeAttributes(Settings settings, String azMe if (AwsEc2Service.AUTO_ATTRIBUTE_SETTING.get(settings) == false) { return Settings.EMPTY; } - Settings.Builder attrs = Settings.builder(); + final Settings.Builder attrs = Settings.builder(); final URL url; final URLConnection urlConnection; @@ -148,7 +148,7 @@ static Settings getAvailabilityZoneNodeAttributes(Settings settings, String azMe logger.debug("obtaining ec2 [placement/availability-zone] from ec2 meta-data url {}", url); urlConnection = SocketAccess.doPrivilegedIOException(url::openConnection); urlConnection.setConnectTimeout(2000); - } catch (IOException e) { + } catch (final IOException e) { // should not happen, we know the url is not malformed, and openConnection does not actually hit network throw new UncheckedIOException(e); } @@ -156,13 +156,13 @@ static Settings getAvailabilityZoneNodeAttributes(Settings settings, String azMe try (InputStream in = SocketAccess.doPrivilegedIOException(urlConnection::getInputStream); BufferedReader urlReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) { - String metadataResult = urlReader.readLine(); - if (metadataResult == null || metadataResult.length() == 0) { + final String metadataResult = urlReader.readLine(); + if ((metadataResult == null) || (metadataResult.length() == 0)) { throw new IllegalStateException("no ec2 metadata returned from " + url); } else { attrs.put(Node.NODE_ATTRIBUTES.getKey() + "aws_availability_zone", metadataResult); } - } catch (IOException e) { + } catch (final IOException e) { // this is lenient so the plugin does not fail when installed outside of ec2 logger.error("failed to get metadata for [placement/availability-zone]", e); } @@ -172,6 +172,13 @@ static Settings getAvailabilityZoneNodeAttributes(Settings settings, String azMe @Override public void close() 
throws IOException { - IOUtils.close(ec2Service.get()); + ec2Service.close(); + } + + @Override + public void reload(Settings settings) { + // secure settings should be readable + final Ec2ClientSettings clientSettings = Ec2ClientSettings.getClientSettings(settings); + ec2Service.refreshAndClearCache(clientSettings); } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java index 34ad449d06e8d..aa08447fd208b 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java @@ -22,7 +22,9 @@ import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.AmazonWebServiceRequest; +import com.amazonaws.ClientConfiguration; import com.amazonaws.ResponseMetadata; +import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.regions.Region; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionRequest; @@ -528,9 +530,12 @@ public class AmazonEC2Mock implements AmazonEC2 { public static final String PREFIX_PRIVATE_DNS = "mock-ip-"; public static final String SUFFIX_PRIVATE_DNS = ".ec2.internal"; - List instances = new ArrayList<>(); + final List instances = new ArrayList<>(); + String endpoint; + final AWSCredentialsProvider credentials; + final ClientConfiguration configuration; - public AmazonEC2Mock(int nodes, List> tagsList) { + public AmazonEC2Mock(int nodes, List> tagsList, AWSCredentialsProvider credentials, ClientConfiguration configuration) { if (tagsList != null) { assert tagsList.size() == nodes; } @@ -552,7 +557,8 @@ public AmazonEC2Mock(int nodes, List> tagsList) { instances.add(instance); } - + this.credentials = credentials; + this.configuration = configuration; } @Override @@ -642,7 +648,7 @@ public DescribeInstancesResult describeInstances(DescribeInstancesRequest descri @Override public void setEndpoint(String endpoint) throws IllegalArgumentException { - throw new UnsupportedOperationException("Not supported in mock"); + this.endpoint = endpoint; } @Override @@ -2110,7 +2116,6 @@ public DryRunResult dryRun(DryRunSupporte @Override public void shutdown() { - throw new UnsupportedOperationException("Not supported in mock"); } @Override diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java index e5841fbc36ff9..a13fe47a632ae 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java @@ -26,31 +26,31 @@ import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.ec2.AwsEc2Service; import org.elasticsearch.discovery.ec2.AwsEc2ServiceImpl; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; public class AwsEc2ServiceImplTests extends ESTestCase { public void testAWSCredentialsWithSystemProviders() { - AWSCredentialsProvider credentialsProvider = 
AwsEc2ServiceImpl.buildCredentials(logger, Settings.EMPTY); + final AWSCredentialsProvider credentialsProvider = AwsEc2ServiceImpl.buildCredentials(logger, + Ec2ClientSettings.getClientSettings(Settings.EMPTY)); assertThat(credentialsProvider, instanceOf(DefaultAWSCredentialsProviderChain.class)); } public void testAWSCredentialsWithElasticsearchAwsSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); + final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("discovery.ec2.access_key", "aws_key"); secureSettings.setString("discovery.ec2.secret_key", "aws_secret"); - Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); launchAWSCredentialsWithElasticsearchSettingsTest(settings, "aws_key", "aws_secret"); } protected void launchAWSCredentialsWithElasticsearchSettingsTest(Settings settings, String expectedKey, String expectedSecret) { - AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, settings).getCredentials(); + final AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, Ec2ClientSettings.getClientSettings(settings)) + .getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is(expectedKey)); assertThat(credentials.getAWSSecretKey(), is(expectedSecret)); } @@ -61,10 +61,10 @@ public void testAWSDefaultConfiguration() { } public void testAWSConfigurationWithAwsSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); + final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("discovery.ec2.proxy.username", "aws_proxy_username"); secureSettings.setString("discovery.ec2.proxy.password", "aws_proxy_password"); - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .put("discovery.ec2.protocol", "http") .put("discovery.ec2.proxy.host", "aws_proxy_host") .put("discovery.ec2.proxy.port", 8080) @@ -81,7 +81,8 @@ protected void launchAWSConfigurationTest(Settings settings, String expectedProxyUsername, String expectedProxyPassword, int expectedReadTimeout) { - ClientConfiguration configuration = AwsEc2ServiceImpl.buildConfiguration(logger, settings); + final ClientConfiguration configuration = AwsEc2ServiceImpl.buildConfiguration(logger, + Ec2ClientSettings.getClientSettings(settings)); assertThat(configuration.getResponseMetadataCacheSize(), is(0)); assertThat(configuration.getProtocol(), is(expectedProtocol)); @@ -92,16 +93,4 @@ protected void launchAWSConfigurationTest(Settings settings, assertThat(configuration.getSocketTimeout(), is(expectedReadTimeout)); } - public void testDefaultEndpoint() { - String endpoint = AwsEc2ServiceImpl.findEndpoint(logger, Settings.EMPTY); - assertThat(endpoint, nullValue()); - } - - public void testSpecificEndpoint() { - Settings settings = Settings.builder() - .put(AwsEc2Service.ENDPOINT_SETTING.getKey(), "ec2.endpoint") - .build(); - String endpoint = AwsEc2ServiceImpl.findEndpoint(logger, settings); - assertThat(endpoint, is("ec2.endpoint")); - } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java index e29821efda223..0596dd697b2eb 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java +++ 
b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java @@ -19,18 +19,19 @@ package org.elasticsearch.discovery.ec2; +import com.amazonaws.ClientConfiguration; +import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.model.Tag; -import org.elasticsearch.common.component.AbstractLifecycleComponent; + import org.elasticsearch.common.settings.Settings; import java.util.List; -public class AwsEc2ServiceMock extends AbstractLifecycleComponent implements AwsEc2Service { +public class AwsEc2ServiceMock extends AwsEc2ServiceImpl { - private int nodes; - private List> tagsList; - private AmazonEC2 client; + private final int nodes; + private final List> tagsList; public AwsEc2ServiceMock(Settings settings, int nodes, List> tagsList) { super(settings); @@ -39,26 +40,8 @@ public AwsEc2ServiceMock(Settings settings, int nodes, List> tagsList) } @Override - public synchronized AmazonEC2 client() { - if (client == null) { - client = new AmazonEC2Mock(nodes, tagsList); - } - - return client; + AmazonEC2 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) { + return new AmazonEC2Mock(nodes, tagsList, credentials, configuration); } - @Override - protected void doStart() { - - } - - @Override - protected void doStop() { - - } - - @Override - protected void doClose() { - - } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceDisableException.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java similarity index 62% rename from plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceDisableException.java rename to plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java index a100079668b54..a92bd243bc9b7 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceDisableException.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java @@ -17,14 +17,22 @@ * under the License. 
*/ -package org.elasticsearch.repositories.azure; +package org.elasticsearch.discovery.ec2; -public class AzureServiceDisableException extends IllegalStateException { - public AzureServiceDisableException(String msg) { - super(msg); +import com.amazonaws.services.ec2.model.Tag; + +import org.elasticsearch.common.settings.Settings; + +import java.util.List; + +public class Ec2DiscoveryPluginMock extends Ec2DiscoveryPlugin { + + Ec2DiscoveryPluginMock(Settings settings) { + this(settings, 1, null); } - public AzureServiceDisableException(String msg, Throwable cause) { - super(msg, cause); + public Ec2DiscoveryPluginMock(Settings settings, int nodes, List> tagsList) { + super(settings, new AwsEc2ServiceMock(settings, nodes, tagsList)); } + } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java index 9bb75c0b09f97..6001ab56d5042 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java @@ -19,12 +19,17 @@ package org.elasticsearch.discovery.ec2; +import java.io.IOException; import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + import org.elasticsearch.discovery.ec2.AwsEc2Service; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.ec2.Ec2DiscoveryPlugin; import org.elasticsearch.node.Node; @@ -33,14 +38,14 @@ public class Ec2DiscoveryPluginTests extends ESTestCase { private Settings getNodeAttributes(Settings settings, String url) { - Settings realSettings = Settings.builder() + final Settings realSettings = Settings.builder() .put(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), true) .put(settings).build(); return Ec2DiscoveryPlugin.getAvailabilityZoneNodeAttributes(realSettings, url); } private void assertNodeAttributes(Settings settings, String url, String expected) { - Settings additional = getNodeAttributes(settings, url); + final Settings additional = getNodeAttributes(settings, url); if (expected == null) { assertTrue(additional.isEmpty()); } else { @@ -49,36 +54,106 @@ private void assertNodeAttributes(Settings settings, String url, String expected } public void testNodeAttributesDisabled() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .put(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), false).build(); assertNodeAttributes(settings, "bogus", null); } public void testNodeAttributes() throws Exception { - Path zoneUrl = createTempFile(); + final Path zoneUrl = createTempFile(); Files.write(zoneUrl, Arrays.asList("us-east-1c")); assertNodeAttributes(Settings.EMPTY, zoneUrl.toUri().toURL().toString(), "us-east-1c"); } public void testNodeAttributesBogusUrl() { - UncheckedIOException e = expectThrows(UncheckedIOException.class, () -> + final UncheckedIOException e = expectThrows(UncheckedIOException.class, () -> getNodeAttributes(Settings.EMPTY, "bogus") ); assertNotNull(e.getCause()); - String msg = e.getCause().getMessage(); + final String msg = e.getCause().getMessage(); assertTrue(msg, msg.contains("no protocol: bogus")); } public void testNodeAttributesEmpty() throws Exception { - Path zoneUrl 
= createTempFile(); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> + final Path zoneUrl = createTempFile(); + final IllegalStateException e = expectThrows(IllegalStateException.class, () -> getNodeAttributes(Settings.EMPTY, zoneUrl.toUri().toURL().toString()) ); assertTrue(e.getMessage(), e.getMessage().contains("no ec2 metadata returned")); } public void testNodeAttributesErrorLenient() throws Exception { - Path dne = createTempDir().resolve("dne"); + final Path dne = createTempDir().resolve("dne"); assertNodeAttributes(Settings.EMPTY, dne.toUri().toURL().toString(), null); } + + public void testDefaultEndpoint() throws IOException { + try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) { + final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint; + assertThat(endpoint, nullValue()); + } + } + + public void testSpecificEndpoint() throws IOException { + final Settings settings = Settings.builder().put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2.endpoint").build(); + try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings)) { + final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint; + assertThat(endpoint, is("ec2.endpoint")); + } + } + + public void testClientSettingsReInit() throws IOException { + final MockSecureSettings mockSecure1 = new MockSecureSettings(); + mockSecure1.setString(Ec2ClientSettings.ACCESS_KEY_SETTING.getKey(), "ec2_access_1"); + mockSecure1.setString(Ec2ClientSettings.SECRET_KEY_SETTING.getKey(), "ec2_secret_1"); + mockSecure1.setString(Ec2ClientSettings.PROXY_USERNAME_SETTING.getKey(), "proxy_username_1"); + mockSecure1.setString(Ec2ClientSettings.PROXY_PASSWORD_SETTING.getKey(), "proxy_password_1"); + final Settings settings1 = Settings.builder() + .put(Ec2ClientSettings.PROXY_HOST_SETTING.getKey(), "proxy_host_1") + .put(Ec2ClientSettings.PROXY_PORT_SETTING.getKey(), 881) + .put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2_endpoint_1") + .setSecureSettings(mockSecure1) + .build(); + final MockSecureSettings mockSecure2 = new MockSecureSettings(); + mockSecure2.setString(Ec2ClientSettings.ACCESS_KEY_SETTING.getKey(), "ec2_access_2"); + mockSecure2.setString(Ec2ClientSettings.SECRET_KEY_SETTING.getKey(), "ec2_secret_2"); + mockSecure2.setString(Ec2ClientSettings.PROXY_USERNAME_SETTING.getKey(), "proxy_username_2"); + mockSecure2.setString(Ec2ClientSettings.PROXY_PASSWORD_SETTING.getKey(), "proxy_password_2"); + final Settings settings2 = Settings.builder() + .put(Ec2ClientSettings.PROXY_HOST_SETTING.getKey(), "proxy_host_2") + .put(Ec2ClientSettings.PROXY_PORT_SETTING.getKey(), 882) + .put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2_endpoint_2") + .setSecureSettings(mockSecure2) + .build(); + try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings1)) { + try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), 
is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + // reload secure settings2 + plugin.reload(settings2); + // client is not released, it is still using the old settings + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + } + try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_2")); + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_2")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_2")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_2")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_2")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(882)); + assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_2")); + } + } + } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index e7986cb878e41..43cc924fadb10 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -39,6 +39,7 @@ import org.junit.Before; import org.junit.BeforeClass; +import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; @@ -91,11 +92,15 @@ protected List buildDynamicNodes(Settings nodeSettings, int nodes } protected List buildDynamicNodes(Settings nodeSettings, int nodes, List> tagsList) { - AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(nodeSettings, nodes, tagsList); - AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, awsEc2Service); - List discoveryNodes = provider.buildDynamicNodes(); - logger.debug("--> nodes found: {}", discoveryNodes); - return discoveryNodes; + try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY, nodes, tagsList)) { + AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, plugin.ec2Service); + List discoveryNodes = provider.buildDynamicNodes(); + logger.debug("--> nodes found: {}", discoveryNodes); + return discoveryNodes; + } catch (IOException e) { + fail("Unexpected IOException"); + return null; + } } 
public void testDefaultSettings() throws InterruptedException { @@ -315,22 +320,23 @@ protected List fetchDynamicNodes() { public void testGetNodeListCached() throws Exception { Settings.Builder builder = Settings.builder() .put(AwsEc2Service.NODE_CACHE_TIME_SETTING.getKey(), "500ms"); - AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(Settings.EMPTY, 1, null); - DummyEc2HostProvider provider = new DummyEc2HostProvider(builder.build(), transportService, awsEc2Service) { - @Override - protected List fetchDynamicNodes() { - fetchCount++; - return Ec2DiscoveryTests.this.buildDynamicNodes(Settings.EMPTY, 1); + try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) { + DummyEc2HostProvider provider = new DummyEc2HostProvider(builder.build(), transportService, plugin.ec2Service) { + @Override + protected List fetchDynamicNodes() { + fetchCount++; + return Ec2DiscoveryTests.this.buildDynamicNodes(Settings.EMPTY, 1); + } + }; + for (int i=0; i<3; i++) { + provider.buildDynamicNodes(); } - }; - for (int i=0; i<3; i++) { - provider.buildDynamicNodes(); - } - assertThat(provider.fetchCount, is(1)); - Thread.sleep(1_000L); // wait for cache to expire - for (int i=0; i<3; i++) { - provider.buildDynamicNodes(); + assertThat(provider.fetchCount, is(1)); + Thread.sleep(1_000L); // wait for cache to expire + for (int i=0; i<3; i++) { + provider.buildDynamicNodes(); + } + assertThat(provider.fetchCount, is(2)); } - assertThat(provider.fetchCount, is(2)); } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java index fc6d9d7e482a8..bcd6b936af1aa 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java @@ -20,46 +20,44 @@ package org.elasticsearch.repositories.azure; import com.microsoft.azure.storage.LocationMode; + import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cluster.metadata.RepositoryMetaData; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; - import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; import java.nio.file.FileAlreadyExistsException; -import java.util.Locale; import java.util.Map; +import static java.util.Collections.emptyMap; + import static org.elasticsearch.repositories.azure.AzureRepository.Repository; public class AzureBlobStore extends AbstractComponent implements BlobStore { - private final AzureStorageService client; + private final AzureStorageService service; private final String clientName; - private final LocationMode locMode; private final String container; + private final LocationMode locationMode; - public AzureBlobStore(RepositoryMetaData metadata, Settings settings, - AzureStorageService client) throws URISyntaxException, StorageException { + public AzureBlobStore(RepositoryMetaData metadata, Settings settings, AzureStorageService service) + throws URISyntaxException, StorageException { super(settings); - this.client = client; this.container = 
Repository.CONTAINER_SETTING.get(metadata.settings()); this.clientName = Repository.CLIENT_NAME.get(metadata.settings()); - - String modeStr = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); - if (Strings.hasLength(modeStr)) { - this.locMode = LocationMode.valueOf(modeStr.toUpperCase(Locale.ROOT)); - } else { - this.locMode = LocationMode.PRIMARY_ONLY; - } + this.service = service; + // locationMode is set per repository, not per client + this.locationMode = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); + final Map prevSettings = this.service.refreshAndClearCache(emptyMap()); + final Map newSettings = AzureStorageSettings.overrideLocationMode(prevSettings, this.locationMode); + this.service.refreshAndClearCache(newSettings); } @Override @@ -71,7 +69,11 @@ public String toString() { * Gets the configured {@link LocationMode} for the Azure storage requests. */ public LocationMode getLocationMode() { - return locMode; + return locationMode; + } + + public String getClientName() { + return clientName; } @Override @@ -80,12 +82,13 @@ public BlobContainer blobContainer(BlobPath path) { } @Override - public void delete(BlobPath path) { - String keyPath = path.buildAsString(); + public void delete(BlobPath path) throws IOException { + final String keyPath = path.buildAsString(); try { - this.client.deleteFiles(this.clientName, this.locMode, container, keyPath); + service.deleteFiles(clientName, container, keyPath); } catch (URISyntaxException | StorageException e) { - logger.warn("can not remove [{}] in container {{}}: {}", keyPath, container, e.getMessage()); + logger.warn("cannot access [{}] in container {{}}: {}", keyPath, container, e.getMessage()); + throw new IOException(e); } } @@ -93,30 +96,29 @@ public void delete(BlobPath path) { public void close() { } - public boolean doesContainerExist() - { - return this.client.doesContainerExist(this.clientName, this.locMode, container); + public boolean containerExist() throws URISyntaxException, StorageException { + return service.doesContainerExist(clientName, container); } public boolean blobExists(String blob) throws URISyntaxException, StorageException { - return this.client.blobExists(this.clientName, this.locMode, container, blob); + return service.blobExists(clientName, container, blob); } public void deleteBlob(String blob) throws URISyntaxException, StorageException { - this.client.deleteBlob(this.clientName, this.locMode, container, blob); + service.deleteBlob(clientName, container, blob); } public InputStream getInputStream(String blob) throws URISyntaxException, StorageException, IOException { - return this.client.getInputStream(this.clientName, this.locMode, container, blob); + return service.getInputStream(clientName, container, blob); } public Map listBlobsByPrefix(String keyPath, String prefix) throws URISyntaxException, StorageException { - return this.client.listBlobsByPrefix(this.clientName, this.locMode, container, keyPath, prefix); + return service.listBlobsByPrefix(clientName, container, keyPath, prefix); } public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws URISyntaxException, StorageException, FileAlreadyExistsException { - this.client.writeBlob(this.clientName, this.locMode, container, blobName, inputStream, blobSize); + service.writeBlob(this.clientName, container, blobName, inputStream, blobSize); } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java 
b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 06bf10fb2e292..47b398a4c2fd3 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -21,6 +21,8 @@ import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; + +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; @@ -33,6 +35,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.snapshots.SnapshotCreationException; import org.elasticsearch.snapshots.SnapshotId; import java.io.IOException; @@ -60,19 +63,19 @@ public class AzureRepository extends BlobStoreRepository { public static final String TYPE = "azure"; public static final class Repository { - @Deprecated // Replaced by client public static final Setting ACCOUNT_SETTING = new Setting<>("account", "default", Function.identity(), Property.NodeScope, Property.Deprecated); public static final Setting CLIENT_NAME = new Setting<>("client", ACCOUNT_SETTING, Function.identity()); - public static final Setting CONTAINER_SETTING = new Setting<>("container", "elasticsearch-snapshots", Function.identity(), Property.NodeScope); public static final Setting BASE_PATH_SETTING = Setting.simpleString("base_path", Property.NodeScope); - public static final Setting LOCATION_MODE_SETTING = Setting.simpleString("location_mode", Property.NodeScope); + public static final Setting LOCATION_MODE_SETTING = new Setting<>("location_mode", + s -> LocationMode.PRIMARY_ONLY.toString(), s -> LocationMode.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope); public static final Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE, Property.NodeScope); public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, Property.NodeScope); + public static final Setting READONLY_SETTING = Setting.boolSetting("readonly", false, Property.NodeScope); } private final AzureBlobStore blobStore; @@ -81,45 +84,32 @@ public static final class Repository { private final boolean compress; private final boolean readonly; - public AzureRepository(RepositoryMetaData metadata, Environment environment, - NamedXContentRegistry namedXContentRegistry, AzureStorageService storageService) - throws IOException, URISyntaxException, StorageException { + public AzureRepository(RepositoryMetaData metadata, Environment environment, NamedXContentRegistry namedXContentRegistry, + AzureStorageService storageService) throws IOException, URISyntaxException, StorageException { super(metadata, environment.settings(), namedXContentRegistry); - - blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); - String container = Repository.CONTAINER_SETTING.get(metadata.settings()); + this.blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); this.chunkSize = Repository.CHUNK_SIZE_SETTING.get(metadata.settings()); this.compress = Repository.COMPRESS_SETTING.get(metadata.settings()); - String modeStr = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); - Boolean 
forcedReadonly = metadata.settings().getAsBoolean("readonly", null); // If the user explicitly did not define a readonly value, we set it by ourselves depending on the location mode setting. // For secondary_only setting, the repository should be read only - if (forcedReadonly == null) { - if (Strings.hasLength(modeStr)) { - LocationMode locationMode = LocationMode.valueOf(modeStr.toUpperCase(Locale.ROOT)); - this.readonly = locationMode == LocationMode.SECONDARY_ONLY; - } else { - this.readonly = false; - } + if (Repository.READONLY_SETTING.exists(metadata.settings())) { + this.readonly = Repository.READONLY_SETTING.get(metadata.settings()); } else { - readonly = forcedReadonly; + this.readonly = this.blobStore.getLocationMode() == LocationMode.SECONDARY_ONLY; } - - String basePath = Repository.BASE_PATH_SETTING.get(metadata.settings()); - + final String basePath = Strings.trimLeadingCharacter(Repository.BASE_PATH_SETTING.get(metadata.settings()), '/'); if (Strings.hasLength(basePath)) { // Remove starting / if any - basePath = Strings.trimLeadingCharacter(basePath, '/'); BlobPath path = new BlobPath(); - for(String elem : basePath.split("/")) { + for(final String elem : basePath.split("/")) { path = path.add(elem); } this.basePath = path; } else { this.basePath = BlobPath.cleanPath(); } - logger.debug("using container [{}], chunk_size [{}], compress [{}], base_path [{}]", - container, chunkSize, compress, basePath); + logger.debug((org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( + "using container [{}], chunk_size [{}], compress [{}], base_path [{}]", blobStore, chunkSize, compress, basePath)); } /** @@ -153,9 +143,13 @@ protected ByteSizeValue chunkSize() { @Override public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData clusterMetadata) { - if (blobStore.doesContainerExist() == false) { - throw new IllegalArgumentException("The bucket [" + blobStore + "] does not exist. Please create it before " + - " creating an azure snapshot repository backed by it."); + try { + if (blobStore.containerExist() == false) { + throw new IllegalArgumentException("The bucket [" + blobStore + "] does not exist. 
Please create it before " + + " creating an azure snapshot repository backed by it."); + } + } catch (URISyntaxException | StorageException e) { + throw new SnapshotCreationException(metadata.name(), snapshotId, e); } super.initializeSnapshot(snapshotId, indices, clusterMetadata); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java index c0126cb8df065..f2702b139a69d 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java @@ -21,12 +21,13 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; - import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -35,24 +36,20 @@ /** * A plugin to add a repository type that writes to and from the Azure cloud storage service. */ -public class AzureRepositoryPlugin extends Plugin implements RepositoryPlugin { - - private final Map clientsSettings; +public class AzureRepositoryPlugin extends Plugin implements RepositoryPlugin, ReloadablePlugin { - // overridable for tests - protected AzureStorageService createStorageService(Settings settings) { - return new AzureStorageServiceImpl(settings, clientsSettings); - } + // protected for testing + final AzureStorageService azureStoreService; public AzureRepositoryPlugin(Settings settings) { // eagerly load client settings so that secure settings are read - clientsSettings = AzureStorageSettings.load(settings); + this.azureStoreService = new AzureStorageServiceImpl(settings); } @Override public Map getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) { return Collections.singletonMap(AzureRepository.TYPE, - (metadata) -> new AzureRepository(metadata, env, namedXContentRegistry, createStorageService(env.settings()))); + (metadata) -> new AzureRepository(metadata, env, namedXContentRegistry, azureStoreService)); } @Override @@ -67,4 +64,14 @@ public List> getSettings() { AzureStorageSettings.PROXY_PORT_SETTING ); } + + @Override + public void reload(Settings settings) { + // secure settings should be readable + final Map clientsSettings = AzureStorageSettings.load(settings); + if (clientsSettings.isEmpty()) { + throw new SettingsException("If you want to use an azure repository, you need to define a client configuration."); + } + azureStoreService.refreshAndClearCache(clientsSettings); + } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java index 1c2ca71fe7887..272c550f1d723 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java @@ -19,9 +19,12 @@ package org.elasticsearch.repositories.azure; -import 
com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.OperationContext; import com.microsoft.azure.storage.StorageException; +import com.microsoft.azure.storage.blob.CloudBlobClient; + import org.elasticsearch.common.blobstore.BlobMetaData; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -30,6 +33,7 @@ import java.net.URISyntaxException; import java.nio.file.FileAlreadyExistsException; import java.util.Map; +import java.util.function.Supplier; /** * Azure Storage Service interface @@ -37,29 +41,46 @@ */ public interface AzureStorageService { + /** + * Creates a {@code CloudBlobClient} on each invocation using the current client + * settings. CloudBlobClient is not thread safe and the settings can change, + * therefore the instance is not cache-able and should only be reused inside a + * thread for logically coupled ops. The {@code OperationContext} is used to + * specify the proxy, but a new context is *required* for each call. + */ + Tuple> client(String clientName); + + /** + * Updates settings for building clients. Any client cache is cleared. Future + * client requests will use the new refreshed settings. + * + * @param clientsSettings the settings for new clients + * @return the old settings + */ + Map refreshAndClearCache(Map clientsSettings); + ByteSizeValue MIN_CHUNK_SIZE = new ByteSizeValue(1, ByteSizeUnit.BYTES); ByteSizeValue MAX_CHUNK_SIZE = new ByteSizeValue(64, ByteSizeUnit.MB); - boolean doesContainerExist(String account, LocationMode mode, String container); + boolean doesContainerExist(String account, String container) throws URISyntaxException, StorageException; - void removeContainer(String account, LocationMode mode, String container) throws URISyntaxException, StorageException; + void removeContainer(String account, String container) throws URISyntaxException, StorageException; - void createContainer(String account, LocationMode mode, String container) throws URISyntaxException, StorageException; + void createContainer(String account, String container) throws URISyntaxException, StorageException; - void deleteFiles(String account, LocationMode mode, String container, String path) throws URISyntaxException, StorageException; + void deleteFiles(String account, String container, String path) throws URISyntaxException, StorageException; - boolean blobExists(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; + boolean blobExists(String account, String container, String blob) throws URISyntaxException, StorageException; - void deleteBlob(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; + void deleteBlob(String account, String container, String blob) throws URISyntaxException, StorageException; - InputStream getInputStream(String account, LocationMode mode, String container, String blob) - throws URISyntaxException, StorageException, IOException; + InputStream getInputStream(String account, String container, String blob) throws URISyntaxException, StorageException, IOException; - Map listBlobsByPrefix(String account, LocationMode mode, String container, String keyPath, String prefix) - throws URISyntaxException, StorageException; + Map listBlobsByPrefix(String account, String container, String keyPath, String prefix) + throws URISyntaxException, StorageException; - void writeBlob(String account, LocationMode mode, String 
container, String blobName, InputStream inputStream, long blobSize) throws - URISyntaxException, StorageException, FileAlreadyExistsException; + void writeBlob(String account, String container, String blobName, InputStream inputStream, long blobSize) + throws URISyntaxException, StorageException, FileAlreadyExistsException; static InputStream giveSocketPermissionsToStream(InputStream stream) { return new InputStream() { diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java index 9f059eaca11c6..e3f56323f3cbf 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java @@ -21,7 +21,6 @@ import com.microsoft.azure.storage.AccessCondition; import com.microsoft.azure.storage.CloudStorageAccount; -import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.OperationContext; import com.microsoft.azure.storage.RetryExponentialRetry; import com.microsoft.azure.storage.RetryPolicy; @@ -36,164 +35,133 @@ import com.microsoft.azure.storage.blob.DeleteSnapshotsOption; import com.microsoft.azure.storage.blob.ListBlobItem; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.repositories.RepositoryException; import java.io.InputStream; import java.net.HttpURLConnection; import java.net.URI; import java.net.URISyntaxException; +import java.security.InvalidKeyException; import java.nio.file.FileAlreadyExistsException; -import java.util.Collections; import java.util.EnumSet; -import java.util.HashMap; import java.util.Map; +import java.util.function.Supplier; + +import static java.util.Collections.emptyMap; public class AzureStorageServiceImpl extends AbstractComponent implements AzureStorageService { - final Map storageSettings; - final Map clients; + // 'package' for testing + volatile Map storageSettings = emptyMap(); - public AzureStorageServiceImpl(Settings settings, Map storageSettings) { + public AzureStorageServiceImpl(Settings settings) { super(settings); - if (storageSettings.isEmpty()) { - // If someone did not register any settings, they basically can't use the plugin - throw new IllegalArgumentException("If you want to use an azure repository, you need to define a client configuration."); - } - this.storageSettings = storageSettings; - this.clients = createClients(storageSettings); + // eagerly load client settings so that secure settings are read + final Map clientsSettings = AzureStorageSettings.load(settings); + refreshAndClearCache(clientsSettings); } - private Map createClients(final Map storageSettings) { - final Map clients = new HashMap<>(); - for (Map.Entry azureStorageEntry : storageSettings.entrySet()) { - final String clientName = azureStorageEntry.getKey(); - final AzureStorageSettings clientSettings = 
azureStorageEntry.getValue(); - try { - logger.trace("creating new Azure storage client with name [{}]", clientName); - String storageConnectionString = - "DefaultEndpointsProtocol=https;" - + "AccountName=" + clientSettings.getAccount() + ";" - + "AccountKey=" + clientSettings.getKey(); - - final String endpointSuffix = clientSettings.getEndpointSuffix(); - if (Strings.hasLength(endpointSuffix)) { - storageConnectionString += ";EndpointSuffix=" + endpointSuffix; - } - // Retrieve storage account from connection-string. - CloudStorageAccount storageAccount = CloudStorageAccount.parse(storageConnectionString); - - // Create the blob client. - CloudBlobClient client = storageAccount.createCloudBlobClient(); - - // Register the client - clients.put(clientSettings.getAccount(), client); - } catch (Exception e) { - logger.error(() -> new ParameterizedMessage("Can not create azure storage client [{}]", clientName), e); - } - } - return Collections.unmodifiableMap(clients); - } - - CloudBlobClient getSelectedClient(String clientName, LocationMode mode) { - logger.trace("selecting a client named [{}], mode [{}]", clientName, mode.name()); - AzureStorageSettings azureStorageSettings = this.storageSettings.get(clientName); + @Override + public Tuple> client(String clientName) { + final AzureStorageSettings azureStorageSettings = this.storageSettings.get(clientName); if (azureStorageSettings == null) { - throw new IllegalArgumentException("Unable to find client with name [" + clientName + "]"); + throw new SettingsException("Unable to find client with name [" + clientName + "]"); } - - CloudBlobClient client = this.clients.get(azureStorageSettings.getAccount()); - if (client == null) { - throw new IllegalArgumentException("No account defined for client with name [" + clientName + "]"); + try { + return new Tuple<>(buildClient(azureStorageSettings), () -> buildOperationContext(azureStorageSettings)); + } catch (InvalidKeyException | URISyntaxException | IllegalArgumentException e) { + throw new SettingsException("Invalid azure client settings with name [" + clientName + "]", e); } + } - // NOTE: for now, just set the location mode in case it is different; - // only one mode per storage clientName can be active at a time - client.getDefaultRequestOptions().setLocationMode(mode); - - // Set timeout option if the user sets cloud.azure.storage.timeout or cloud.azure.storage.xxx.timeout (it's negative by default) - if (azureStorageSettings.getTimeout().getSeconds() > 0) { - try { - int timeout = (int) azureStorageSettings.getTimeout().getMillis(); - client.getDefaultRequestOptions().setTimeoutIntervalInMs(timeout); - } catch (ClassCastException e) { - throw new IllegalArgumentException("Can not convert [" + azureStorageSettings.getTimeout() + - "]. 
It can not be longer than 2,147,483,647ms."); + protected CloudBlobClient buildClient(AzureStorageSettings azureStorageSettings) throws InvalidKeyException, URISyntaxException { + final CloudBlobClient client = createClient(azureStorageSettings); + // Set timeout option if the user sets cloud.azure.storage.timeout or + // cloud.azure.storage.xxx.timeout (it's negative by default) + final long timeout = azureStorageSettings.getTimeout().getMillis(); + if (timeout > 0) { + if (timeout > Integer.MAX_VALUE) { + throw new IllegalArgumentException("Timeout [" + azureStorageSettings.getTimeout() + "] exceeds 2,147,483,647ms."); } + client.getDefaultRequestOptions().setTimeoutIntervalInMs((int) timeout); } - // We define a default exponential retry policy - client.getDefaultRequestOptions().setRetryPolicyFactory( - new RetryExponentialRetry(RetryPolicy.DEFAULT_CLIENT_BACKOFF, azureStorageSettings.getMaxRetries())); - + client.getDefaultRequestOptions() + .setRetryPolicyFactory(new RetryExponentialRetry(RetryPolicy.DEFAULT_CLIENT_BACKOFF, azureStorageSettings.getMaxRetries())); + client.getDefaultRequestOptions().setLocationMode(azureStorageSettings.getLocationMode()); return client; } - private OperationContext generateOperationContext(String clientName) { - OperationContext context = new OperationContext(); - AzureStorageSettings azureStorageSettings = this.storageSettings.get(clientName); - - if (azureStorageSettings.getProxy() != null) { - context.setProxy(azureStorageSettings.getProxy()); - } + protected CloudBlobClient createClient(AzureStorageSettings azureStorageSettings) throws InvalidKeyException, URISyntaxException { + final String connectionString = azureStorageSettings.buildConnectionString(); + return CloudStorageAccount.parse(connectionString).createCloudBlobClient(); + } + protected OperationContext buildOperationContext(AzureStorageSettings azureStorageSettings) { + final OperationContext context = new OperationContext(); + context.setProxy(azureStorageSettings.getProxy()); return context; } @Override - public boolean doesContainerExist(String account, LocationMode mode, String container) { - try { - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - return SocketAccess.doPrivilegedException(() -> blobContainer.exists(null, null, generateOperationContext(account))); - } catch (Exception e) { - logger.error("can not access container [{}]", container); - } - return false; + public Map refreshAndClearCache(Map clientsSettings) { + final Map prevSettings = this.storageSettings; + this.storageSettings = MapBuilder.newMapBuilder(clientsSettings).immutableMap(); + // clients are built lazily by {@link client(String)} + return prevSettings; + } + + @Override + public boolean doesContainerExist(String account, String container) throws URISyntaxException, StorageException { + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + return SocketAccess.doPrivilegedException(() -> blobContainer.exists(null, null, client.v2().get())); } @Override - public void removeContainer(String account, LocationMode mode, String container) throws URISyntaxException, StorageException { - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - logger.trace("removing container [{}]", container); - SocketAccess.doPrivilegedException(() -> 
blobContainer.deleteIfExists(null, null, generateOperationContext(account))); + public void removeContainer(String account, String container) throws URISyntaxException, StorageException { + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + logger.trace(() -> new ParameterizedMessage("removing container [{}]", container)); + SocketAccess.doPrivilegedException(() -> blobContainer.deleteIfExists(null, null, client.v2().get())); } @Override - public void createContainer(String account, LocationMode mode, String container) throws URISyntaxException, StorageException { + public void createContainer(String account, String container) throws URISyntaxException, StorageException { try { - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - logger.trace("creating container [{}]", container); - SocketAccess.doPrivilegedException(() -> blobContainer.createIfNotExists(null, null, generateOperationContext(account))); - } catch (IllegalArgumentException e) { - logger.trace((Supplier) () -> new ParameterizedMessage("fails creating container [{}]", container), e); + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + logger.trace(() -> new ParameterizedMessage("creating container [{}]", container)); + SocketAccess.doPrivilegedException(() -> blobContainer.createIfNotExists(null, null, client.v2().get())); + } catch (final IllegalArgumentException e) { + logger.trace(() -> new ParameterizedMessage("failed creating container [{}]", container), e); throw new RepositoryException(container, e.getMessage(), e); } } @Override - public void deleteFiles(String account, LocationMode mode, String container, String path) throws URISyntaxException, StorageException { - logger.trace("delete files container [{}], path [{}]", container, path); - - // Container name must be lower case. - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); + public void deleteFiles(String account, String container, String path) throws URISyntaxException, StorageException { + final Tuple> client = client(account); + // container name must be lower case. 
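The refreshAndClearCache contract used throughout this file, namely swap in an immutable settings map and hand back the previous one, is also what lets AzureBlobStore re-apply the repository's location_mode on top of the existing client settings. A standalone sketch of that swap idiom follows; it uses plain Strings in place of AzureStorageSettings and is an illustration of the pattern, not the service implementation itself:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

// Sketch of the settings-swap idiom: a volatile, immutable map is replaced atomically and the
// previous map is returned so callers can rewrite and re-apply it (as the blob store does for
// location_mode). Plain Strings stand in for AzureStorageSettings here.
public class SettingsSwapDemo {
    private volatile Map<String, String> storageSettings = Collections.emptyMap();

    public Map<String, String> refreshAndClearCache(Map<String, String> newSettings) {
        final Map<String, String> prev = storageSettings;
        storageSettings = Collections.unmodifiableMap(new HashMap<>(newSettings));
        return prev;   // callers may override values in this map and re-apply it
    }

    public static void main(String[] args) {
        final SettingsSwapDemo service = new SettingsSwapDemo();
        service.refreshAndClearCache(Collections.singletonMap("default", "primary_only"));
        // emulate AzureBlobStore: take the current settings out, override one aspect, put them back
        final Map<String, String> prev = service.refreshAndClearCache(Collections.emptyMap());
        final Map<String, String> overridden = new HashMap<>(prev);
        overridden.replaceAll((name, value) -> "secondary_only");
        service.refreshAndClearCache(overridden);
        System.out.println(service.storageSettings);   // prints: {default=secondary_only}
    }
}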
+ final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + logger.trace(() -> new ParameterizedMessage("delete files container [{}], path [{}]", container, path)); SocketAccess.doPrivilegedVoidException(() -> { if (blobContainer.exists()) { - // We list the blobs using a flat blob listing mode - for (ListBlobItem blobItem : blobContainer.listBlobs(path, true, EnumSet.noneOf(BlobListingDetails.class), null, - generateOperationContext(account))) { - String blobName = blobNameFromUri(blobItem.getUri()); - logger.trace("removing blob [{}] full URI was [{}]", blobName, blobItem.getUri()); - deleteBlob(account, mode, container, blobName); + // list the blobs using a flat blob listing mode + for (final ListBlobItem blobItem : blobContainer.listBlobs(path, true, EnumSet.noneOf(BlobListingDetails.class), null, + client.v2().get())) { + final String blobName = blobNameFromUri(blobItem.getUri()); + logger.trace(() -> new ParameterizedMessage("removing blob [{}] full URI was [{}]", blobName, blobItem.getUri())); + // don't call {@code #deleteBlob}, use the same client + final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blobName); + azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get()); } } }); @@ -205,85 +173,82 @@ public void deleteFiles(String account, LocationMode mode, String container, Str * @param uri URI to parse * @return The blob name relative to the container */ - public static String blobNameFromUri(URI uri) { - String path = uri.getPath(); - + static String blobNameFromUri(URI uri) { + final String path = uri.getPath(); // We remove the container name from the path // The 3 magic number cames from the fact if path is /container/path/to/myfile // First occurrence is empty "/" // Second occurrence is "container // Last part contains "path/to/myfile" which is what we want to get - String[] splits = path.split("/", 3); - + final String[] splits = path.split("/", 3); // We return the remaining end of the string return splits[2]; } @Override - public boolean blobExists(String account, LocationMode mode, String container, String blob) - throws URISyntaxException, StorageException { + public boolean blobExists(String account, String container, String blob) + throws URISyntaxException, StorageException { // Container name must be lower case. 
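The "3 magic number" comment in blobNameFromUri above is easiest to see with a concrete value. The snippet below only demonstrates the split("/", 3) behaviour; the account URL is a made-up example, not a setting from this patch:

import java.net.URI;

// For a path of the form /container/path/to/myfile, a split with limit 3 yields
// ["", "container", "path/to/myfile"], so index 2 is the blob name relative to the container.
public class BlobNameFromUriDemo {
    public static void main(String[] args) {
        final String path = URI.create("https://account.blob.core.windows.net/container/path/to/myfile").getPath();
        final String[] splits = path.split("/", 3);
        System.out.println(splits[2]);   // prints: path/to/myfile
    }
}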
- CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - if (SocketAccess.doPrivilegedException(() -> blobContainer.exists(null, null, generateOperationContext(account)))) { - CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); - return SocketAccess.doPrivilegedException(() -> azureBlob.exists(null, null, generateOperationContext(account))); - } - - return false; + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + return SocketAccess.doPrivilegedException(() -> { + if (blobContainer.exists(null, null, client.v2().get())) { + final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); + return azureBlob.exists(null, null, client.v2().get()); + } + return false; + }); } @Override - public void deleteBlob(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException { - logger.trace("delete blob for container [{}], blob [{}]", container, blob); - + public void deleteBlob(String account, String container, String blob) throws URISyntaxException, StorageException { + final Tuple> client = client(account); // Container name must be lower case. - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - if (SocketAccess.doPrivilegedException(() -> blobContainer.exists(null, null, generateOperationContext(account)))) { - logger.trace("container [{}]: blob [{}] found. removing.", container, blob); - CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); - SocketAccess.doPrivilegedVoidException(() -> azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, - generateOperationContext(account))); - } + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + logger.trace(() -> new ParameterizedMessage("delete blob for container [{}], blob [{}]", container, blob)); + SocketAccess.doPrivilegedVoidException(() -> { + if (blobContainer.exists(null, null, client.v2().get())) { + final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); + logger.trace(() -> new ParameterizedMessage("container [{}]: blob [{}] found. 
removing.", container, blob)); + azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get()); + } + }); } @Override - public InputStream getInputStream(String account, LocationMode mode, String container, String blob) throws URISyntaxException, + public InputStream getInputStream(String account, String container, String blob) throws URISyntaxException, StorageException { - logger.trace("reading container [{}], blob [{}]", container, blob); - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlockBlob blockBlobReference = client.getContainerReference(container).getBlockBlobReference(blob); - BlobInputStream is = SocketAccess.doPrivilegedException(() -> - blockBlobReference.openInputStream(null, null, generateOperationContext(account))); + final Tuple> client = client(account); + final CloudBlockBlob blockBlobReference = client.v1().getContainerReference(container).getBlockBlobReference(blob); + logger.trace(() -> new ParameterizedMessage("reading container [{}], blob [{}]", container, blob)); + final BlobInputStream is = SocketAccess.doPrivilegedException(() -> + blockBlobReference.openInputStream(null, null, client.v2().get())); return AzureStorageService.giveSocketPermissionsToStream(is); } @Override - public Map listBlobsByPrefix(String account, LocationMode mode, String container, String keyPath, String prefix) + public Map listBlobsByPrefix(String account, String container, String keyPath, String prefix) throws URISyntaxException, StorageException { // NOTE: this should be here: if (prefix == null) prefix = ""; // however, this is really inefficient since deleteBlobsByPrefix enumerates everything and // then does a prefix match on the result; it should just call listBlobsByPrefix with the prefix! - - logger.debug("listing container [{}], keyPath [{}], prefix [{}]", container, keyPath, prefix); - MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); - EnumSet enumBlobListingDetails = EnumSet.of(BlobListingDetails.METADATA); - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); + final MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); + final EnumSet enumBlobListingDetails = EnumSet.of(BlobListingDetails.METADATA); + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + logger.trace(() -> new ParameterizedMessage("listing container [{}], keyPath [{}], prefix [{}]", container, keyPath, prefix)); SocketAccess.doPrivilegedVoidException(() -> { if (blobContainer.exists()) { - for (ListBlobItem blobItem : blobContainer.listBlobs(keyPath + (prefix == null ? "" : prefix), false, - enumBlobListingDetails, null, generateOperationContext(account))) { - URI uri = blobItem.getUri(); - logger.trace("blob url [{}]", uri); - + for (final ListBlobItem blobItem : blobContainer.listBlobs(keyPath + (prefix == null ? 
"" : prefix), false, + enumBlobListingDetails, null, client.v2().get())) { + final URI uri = blobItem.getUri(); + logger.trace(() -> new ParameterizedMessage("blob url [{}]", uri)); // uri.getPath is of the form /container/keyPath.* and we want to strip off the /container/ // this requires 1 + container.length() + 1, with each 1 corresponding to one of the / - String blobPath = uri.getPath().substring(1 + container.length() + 1); - BlobProperties properties = ((CloudBlockBlob) blobItem).getProperties(); - String name = blobPath.substring(keyPath.length()); - logger.trace("blob url [{}], name [{}], size [{}]", uri, name, properties.getLength()); + final String blobPath = uri.getPath().substring(1 + container.length() + 1); + final BlobProperties properties = ((CloudBlockBlob) blobItem).getProperties(); + final String name = blobPath.substring(keyPath.length()); + logger.trace(() -> new ParameterizedMessage("blob url [{}], name [{}], size [{}]", uri, name, properties.getLength())); blobsBuilder.put(name, new PlainBlobMetaData(name, properties.getLength())); } } @@ -292,22 +257,23 @@ enumBlobListingDetails, null, generateOperationContext(account))) { } @Override - public void writeBlob(String account, LocationMode mode, String container, String blobName, InputStream inputStream, long blobSize) + public void writeBlob(String account, String container, String blobName, InputStream inputStream, long blobSize) throws URISyntaxException, StorageException, FileAlreadyExistsException { - logger.trace("writeBlob({}, stream, {})", blobName, blobSize); - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - CloudBlockBlob blob = blobContainer.getBlockBlobReference(blobName); + logger.trace(() -> new ParameterizedMessage("writeBlob({}, stream, {})", blobName, blobSize)); + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + final CloudBlockBlob blob = blobContainer.getBlockBlobReference(blobName); try { SocketAccess.doPrivilegedVoidException(() -> blob.upload(inputStream, blobSize, AccessCondition.generateIfNotExistsCondition(), - null, generateOperationContext(account))); - } catch (StorageException se) { + null, client.v2().get())); + } catch (final StorageException se) { if (se.getHttpStatusCode() == HttpURLConnection.HTTP_CONFLICT && StorageErrorCodeStrings.BLOB_ALREADY_EXISTS.equals(se.getErrorCode())) { throw new FileAlreadyExistsException(blobName, null, se.getMessage()); } throw se; } - logger.trace("writeBlob({}, stream, {}) - done", blobName, blobSize); + logger.trace(() -> new ParameterizedMessage("writeBlob({}, stream, {}) - done", blobName, blobSize)); } + } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java index e360558933cc1..c4e4c1439e45f 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java @@ -19,8 +19,10 @@ package org.elasticsearch.repositories.azure; +import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.RetryPolicy; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.MapBuilder; import 
org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; @@ -29,7 +31,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.TimeValue; - import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.Proxy; @@ -39,7 +40,7 @@ import java.util.Locale; import java.util.Map; -public final class AzureStorageSettings { +final class AzureStorageSettings { // prefix for azure client settings private static final String AZURE_CLIENT_PREFIX_KEY = "azure.client."; @@ -86,22 +87,33 @@ public final class AzureStorageSettings { private final TimeValue timeout; private final int maxRetries; private final Proxy proxy; + private final LocationMode locationMode; + // copy-constructor + private AzureStorageSettings(String account, String key, String endpointSuffix, TimeValue timeout, int maxRetries, Proxy proxy, + LocationMode locationMode) { + this.account = account; + this.key = key; + this.endpointSuffix = endpointSuffix; + this.timeout = timeout; + this.maxRetries = maxRetries; + this.proxy = proxy; + this.locationMode = locationMode; + } - public AzureStorageSettings(String account, String key, String endpointSuffix, TimeValue timeout, int maxRetries, + AzureStorageSettings(String account, String key, String endpointSuffix, TimeValue timeout, int maxRetries, Proxy.Type proxyType, String proxyHost, Integer proxyPort) { this.account = account; this.key = key; this.endpointSuffix = endpointSuffix; this.timeout = timeout; this.maxRetries = maxRetries; - // Register the proxy if we have any // Validate proxy settings - if (proxyType.equals(Proxy.Type.DIRECT) && (proxyPort != 0 || Strings.hasText(proxyHost))) { + if (proxyType.equals(Proxy.Type.DIRECT) && ((proxyPort != 0) || Strings.hasText(proxyHost))) { throw new SettingsException("Azure Proxy port or host have been set but proxy type is not defined."); } - if (proxyType.equals(Proxy.Type.DIRECT) == false && (proxyPort == 0 || Strings.isEmpty(proxyHost))) { + if ((proxyType.equals(Proxy.Type.DIRECT) == false) && ((proxyPort == 0) || Strings.isEmpty(proxyHost))) { throw new SettingsException("Azure Proxy type has been set but proxy host or port is not defined."); } @@ -110,10 +122,11 @@ public AzureStorageSettings(String account, String key, String endpointSuffix, T } else { try { proxy = new Proxy(proxyType, new InetSocketAddress(InetAddress.getByName(proxyHost), proxyPort)); - } catch (UnknownHostException e) { + } catch (final UnknownHostException e) { throw new SettingsException("Azure proxy host is unknown.", e); } } + this.locationMode = LocationMode.PRIMARY_ONLY; } public String getKey() { @@ -140,37 +153,55 @@ public Proxy getProxy() { return proxy; } + public String buildConnectionString() { + final StringBuilder connectionStringBuilder = new StringBuilder(); + connectionStringBuilder.append("DefaultEndpointsProtocol=https") + .append(";AccountName=") + .append(account) + .append(";AccountKey=") + .append(key); + if (Strings.hasText(endpointSuffix)) { + connectionStringBuilder.append(";EndpointSuffix=").append(endpointSuffix); + } + return connectionStringBuilder.toString(); + } + + public LocationMode getLocationMode() { + return locationMode; + } + @Override public String toString() { final StringBuilder sb = new StringBuilder("AzureStorageSettings{"); - sb.append(", account='").append(account).append('\''); + 
sb.append("account='").append(account).append('\''); sb.append(", key='").append(key).append('\''); sb.append(", timeout=").append(timeout); sb.append(", endpointSuffix='").append(endpointSuffix).append('\''); sb.append(", maxRetries=").append(maxRetries); sb.append(", proxy=").append(proxy); + sb.append(", locationMode='").append(locationMode).append('\''); sb.append('}'); return sb.toString(); } /** - * Parses settings and read all settings available under azure.client.* + * Parse and read all settings available under the azure.client.* namespace * @param settings settings to parse * @return All the named configurations */ public static Map load(Settings settings) { // Get the list of existing named configurations - Map storageSettings = new HashMap<>(); - for (String clientName : ACCOUNT_SETTING.getNamespaces(settings)) { + final Map storageSettings = new HashMap<>(); + for (final String clientName : ACCOUNT_SETTING.getNamespaces(settings)) { storageSettings.put(clientName, getClientSettings(settings, clientName)); } - - if (storageSettings.containsKey("default") == false && storageSettings.isEmpty() == false) { + if (false == storageSettings.containsKey("default") && false == storageSettings.isEmpty()) { // in case no setting named "default" has been set, let's define our "default" // as the first named config we get - AzureStorageSettings defaultSettings = storageSettings.values().iterator().next(); + final AzureStorageSettings defaultSettings = storageSettings.values().iterator().next(); storageSettings.put("default", defaultSettings); } + assert storageSettings.containsKey("default") || storageSettings.isEmpty() : "always have 'default' if any"; return Collections.unmodifiableMap(storageSettings); } @@ -191,13 +222,25 @@ static AzureStorageSettings getClientSettings(Settings settings, String clientNa private static T getConfigValue(Settings settings, String clientName, Setting.AffixSetting clientSetting) { - Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); + final Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); return concreteSetting.get(settings); } public static T getValue(Settings settings, String groupName, Setting setting) { - Setting.AffixKey k = (Setting.AffixKey) setting.getRawKey(); - String fullKey = k.toConcreteKey(groupName).toString(); + final Setting.AffixKey k = (Setting.AffixKey) setting.getRawKey(); + final String fullKey = k.toConcreteKey(groupName).toString(); return setting.getConcreteSetting(fullKey).get(settings); } + + static Map overrideLocationMode(Map clientsSettings, + LocationMode locationMode) { + final MapBuilder mapBuilder = new MapBuilder<>(); + for (final Map.Entry entry : clientsSettings.entrySet()) { + final AzureStorageSettings azureSettings = new AzureStorageSettings(entry.getValue().account, entry.getValue().key, + entry.getValue().endpointSuffix, entry.getValue().timeout, entry.getValue().maxRetries, entry.getValue().proxy, + locationMode); + mapBuilder.put(entry.getKey(), azureSettings); + } + return mapBuilder.immutableMap(); + } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java index 26b02278eddc0..639905042cf87 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java +++ 
b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java @@ -34,6 +34,7 @@ import java.net.URISyntaxException; import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; public class AzureRepositorySettingsTests extends ESTestCase { @@ -44,7 +45,7 @@ private AzureRepository azureRepository(Settings settings) throws StorageExcepti .put(settings) .build(); return new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings), - TestEnvironment.newEnvironment(internalSettings), NamedXContentRegistry.EMPTY, null); + TestEnvironment.newEnvironment(internalSettings), NamedXContentRegistry.EMPTY, mock(AzureStorageService.class)); } public void testReadonlyDefault() throws StorageException, IOException, URISyntaxException { diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java index 439a9d567f1a4..10163bb2f31df 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java @@ -19,9 +19,7 @@ package org.elasticsearch.repositories.azure; - import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; @@ -77,9 +75,9 @@ private static Settings.Builder generateMockSettings() { return Settings.builder().setSecureSettings(generateMockSecureSettings()); } + @SuppressWarnings("resource") private static AzureStorageService getAzureStorageService() { - return new AzureStorageServiceImpl(generateMockSettings().build(), - AzureStorageSettings.load(generateMockSettings().build())); + return new AzureRepositoryPlugin(generateMockSettings().build()).azureStoreService; } @Override @@ -94,7 +92,7 @@ private static String getContainerName() { * there mustn't be a hyphen between the 2 concatenated numbers * (can't have 2 consecutives hyphens on Azure containers) */ - String testName = "snapshot-itest-" + final String testName = "snapshot-itest-" .concat(RandomizedTest.getContext().getRunnerSeedAsString().toLowerCase(Locale.ROOT)); return testName.contains(" ") ? 
Strings.split(testName, " ")[0] : testName; } @@ -123,7 +121,7 @@ private static void createTestContainer(String containerName) throws Exception { // It could happen that we run this test really close to a previous one // so we might need some time to be able to create the container assertBusy(() -> { - getAzureStorageService().createContainer("default", LocationMode.PRIMARY_ONLY, containerName); + getAzureStorageService().createContainer("default", containerName); }, 30, TimeUnit.SECONDS); } @@ -132,7 +130,7 @@ private static void createTestContainer(String containerName) throws Exception { * @param containerName container name to use */ private static void removeTestContainer(String containerName) throws URISyntaxException, StorageException { - getAzureStorageService().removeContainer("default", LocationMode.PRIMARY_ONLY, containerName); + getAzureStorageService().removeContainer("default", containerName); } @Override @@ -141,7 +139,7 @@ protected Collection> nodePlugins() { } private String getRepositoryPath() { - String testName = "it-" + getTestName(); + final String testName = "it-" + getTestName(); return testName.contains(" ") ? Strings.split(testName, " ")[0] : testName; } @@ -159,21 +157,21 @@ public Settings indexSettings() { public final void wipeAzureRepositories() { try { client().admin().cluster().prepareDeleteRepository("*").get(); - } catch (RepositoryMissingException ignored) { + } catch (final RepositoryMissingException ignored) { } } public void testMultipleRepositories() { - Client client = client(); + final Client client = client(); logger.info("--> creating azure repository with path [{}]", getRepositoryPath()); - PutRepositoryResponse putRepositoryResponse1 = client.admin().cluster().preparePutRepository("test-repo1") + final PutRepositoryResponse putRepositoryResponse1 = client.admin().cluster().preparePutRepository("test-repo1") .setType("azure").setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName().concat("-1")) .put(Repository.BASE_PATH_SETTING.getKey(), getRepositoryPath()) .put(Repository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(1000, 10000), ByteSizeUnit.BYTES) ).get(); assertThat(putRepositoryResponse1.isAcknowledged(), equalTo(true)); - PutRepositoryResponse putRepositoryResponse2 = client.admin().cluster().preparePutRepository("test-repo2") + final PutRepositoryResponse putRepositoryResponse2 = client.admin().cluster().preparePutRepository("test-repo2") .setType("azure").setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName().concat("-2")) .put(Repository.BASE_PATH_SETTING.getKey(), getRepositoryPath()) @@ -194,14 +192,14 @@ public void testMultipleRepositories() { assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().getTotalHits(), equalTo(100L)); logger.info("--> snapshot 1"); - CreateSnapshotResponse createSnapshotResponse1 = client.admin().cluster().prepareCreateSnapshot("test-repo1", "test-snap") + final CreateSnapshotResponse createSnapshotResponse1 = client.admin().cluster().prepareCreateSnapshot("test-repo1", "test-snap") .setWaitForCompletion(true).setIndices("test-idx-1").get(); assertThat(createSnapshotResponse1.getSnapshotInfo().successfulShards(), greaterThan(0)); assertThat(createSnapshotResponse1.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse1.getSnapshotInfo().totalShards())); logger.info("--> snapshot 2"); - CreateSnapshotResponse createSnapshotResponse2 = client.admin().cluster().prepareCreateSnapshot("test-repo2", 
"test-snap") + final CreateSnapshotResponse createSnapshotResponse2 = client.admin().cluster().prepareCreateSnapshot("test-repo2", "test-snap") .setWaitForCompletion(true).setIndices("test-idx-2").get(); assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0)); assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), @@ -216,7 +214,7 @@ public void testMultipleRepositories() { logger.info("--> delete indices"); cluster().wipeIndices("test-idx-1", "test-idx-2"); logger.info("--> restore one index after deletion from snapshot 1"); - RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin().cluster().prepareRestoreSnapshot("test-repo1", "test-snap") + final RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin().cluster().prepareRestoreSnapshot("test-repo1", "test-snap") .setWaitForCompletion(true).setIndices("test-idx-1").get(); assertThat(restoreSnapshotResponse1.getRestoreInfo().totalShards(), greaterThan(0)); ensureGreen(); @@ -226,7 +224,7 @@ public void testMultipleRepositories() { assertThat(clusterState.getMetaData().hasIndex("test-idx-2"), equalTo(false)); logger.info("--> restore other index after deletion from snapshot 2"); - RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin().cluster().prepareRestoreSnapshot("test-repo2", "test-snap") + final RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin().cluster().prepareRestoreSnapshot("test-repo2", "test-snap") .setWaitForCompletion(true).setIndices("test-idx-2").get(); assertThat(restoreSnapshotResponse2.getRestoreInfo().totalShards(), greaterThan(0)); ensureGreen(); @@ -252,7 +250,7 @@ public void testListBlobs_26() throws StorageException, URISyntaxException { } refresh(); - ClusterAdminClient client = client().admin().cluster(); + final ClusterAdminClient client = client().admin().cluster(); logger.info("--> creating azure repository without any path"); PutRepositoryResponse putRepositoryResponse = client.preparePutRepository(repositoryName).setType("azure") .setSettings(Settings.builder() @@ -300,9 +298,9 @@ public void testListBlobs_26() throws StorageException, URISyntaxException { */ public void testGetDeleteNonExistingSnapshot_28() throws StorageException, URISyntaxException { final String repositoryName="test-repo-28"; - ClusterAdminClient client = client().admin().cluster(); + final ClusterAdminClient client = client().admin().cluster(); logger.info("--> creating azure repository without any path"); - PutRepositoryResponse putRepositoryResponse = client.preparePutRepository(repositoryName).setType("azure") + final PutRepositoryResponse putRepositoryResponse = client.preparePutRepository(repositoryName).setType("azure") .setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName()) ).get(); @@ -311,14 +309,14 @@ public void testGetDeleteNonExistingSnapshot_28() throws StorageException, URISy try { client.prepareGetSnapshots(repositoryName).addSnapshots("nonexistingsnapshotname").get(); fail("Shouldn't be here"); - } catch (SnapshotMissingException ex) { + } catch (final SnapshotMissingException ex) { // Expected } try { client.prepareDeleteSnapshot(repositoryName, "nonexistingsnapshotname").get(); fail("Shouldn't be here"); - } catch (SnapshotMissingException ex) { + } catch (final SnapshotMissingException ex) { // Expected } } @@ -328,9 +326,9 @@ public void testGetDeleteNonExistingSnapshot_28() throws StorageException, URISy */ public void testNonExistingRepo_23() { final String repositoryName = 
"test-repo-test23"; - Client client = client(); + final Client client = client(); logger.info("--> creating azure repository with path [{}]", getRepositoryPath()); - PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository(repositoryName) + final PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository(repositoryName) .setType("azure").setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName()) .put(Repository.BASE_PATH_SETTING.getKey(), getRepositoryPath()) @@ -342,7 +340,7 @@ public void testNonExistingRepo_23() { try { client.admin().cluster().prepareRestoreSnapshot(repositoryName, "no-existing-snapshot").setWaitForCompletion(true).get(); fail("Shouldn't be here"); - } catch (SnapshotRestoreException ex) { + } catch (final SnapshotRestoreException ex) { // Expected } } @@ -356,7 +354,7 @@ public void testRemoveAndCreateContainer() throws Exception { createTestContainer(container); removeTestContainer(container); - ClusterAdminClient client = client().admin().cluster(); + final ClusterAdminClient client = client().admin().cluster(); logger.info("--> creating azure repository while container is being removed"); try { client.preparePutRepository("test-repo").setType("azure") @@ -364,7 +362,7 @@ public void testRemoveAndCreateContainer() throws Exception { .put(Repository.CONTAINER_SETTING.getKey(), container) ).get(); fail("we should get a RepositoryVerificationException"); - } catch (RepositoryVerificationException e) { + } catch (final RepositoryVerificationException e) { // Fine we expect that } } @@ -378,9 +376,9 @@ public void testRemoveAndCreateContainer() throws Exception { * @throws Exception If anything goes wrong */ public void testGeoRedundantStorage() throws Exception { - Client client = client(); + final Client client = client(); logger.info("--> creating azure primary repository"); - PutRepositoryResponse putRepositoryResponsePrimary = client.admin().cluster().preparePutRepository("primary") + final PutRepositoryResponse putRepositoryResponsePrimary = client.admin().cluster().preparePutRepository("primary") .setType("azure").setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName()) ).get(); @@ -394,7 +392,7 @@ public void testGeoRedundantStorage() throws Exception { assertThat(endWait - startWait, lessThanOrEqualTo(30000L)); logger.info("--> creating azure secondary repository"); - PutRepositoryResponse putRepositoryResponseSecondary = client.admin().cluster().preparePutRepository("secondary") + final PutRepositoryResponse putRepositoryResponseSecondary = client.admin().cluster().preparePutRepository("secondary") .setType("azure").setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName()) .put(Repository.LOCATION_MODE_SETTING.getKey(), "secondary_only") diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java index 4b111e549476c..a680af06fc655 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java @@ -19,11 +19,14 @@ package org.elasticsearch.repositories.azure; -import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.OperationContext; 
import com.microsoft.azure.storage.StorageException; +import com.microsoft.azure.storage.blob.CloudBlobClient; + import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.internal.io.Streams; @@ -40,6 +43,9 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Supplier; + +import static java.util.Collections.emptyMap; /** * In memory storage for unit tests @@ -53,44 +59,44 @@ public AzureStorageServiceMock() { } @Override - public boolean doesContainerExist(String account, LocationMode mode, String container) { + public boolean doesContainerExist(String account, String container) { return true; } @Override - public void removeContainer(String account, LocationMode mode, String container) { + public void removeContainer(String account, String container) { } @Override - public void createContainer(String account, LocationMode mode, String container) { + public void createContainer(String account, String container) { } @Override - public void deleteFiles(String account, LocationMode mode, String container, String path) { - final Map blobs = listBlobsByPrefix(account, mode, container, path, null); - blobs.keySet().forEach(key -> deleteBlob(account, mode, container, key)); + public void deleteFiles(String account, String container, String path) { + final Map blobs = listBlobsByPrefix(account, container, path, null); + blobs.keySet().forEach(key -> deleteBlob(account, container, key)); } @Override - public boolean blobExists(String account, LocationMode mode, String container, String blob) { + public boolean blobExists(String account, String container, String blob) { return blobs.containsKey(blob); } @Override - public void deleteBlob(String account, LocationMode mode, String container, String blob) { + public void deleteBlob(String account, String container, String blob) { blobs.remove(blob); } @Override - public InputStream getInputStream(String account, LocationMode mode, String container, String blob) throws IOException { - if (!blobExists(account, mode, container, blob)) { + public InputStream getInputStream(String account, String container, String blob) throws IOException { + if (!blobExists(account, container, blob)) { throw new NoSuchFileException("missing blob [" + blob + "]"); } return AzureStorageService.giveSocketPermissionsToStream(new PermissionRequiringInputStream(blobs.get(blob).toByteArray())); } @Override - public Map listBlobsByPrefix(String account, LocationMode mode, String container, String keyPath, String prefix) { + public Map listBlobsByPrefix(String account, String container, String keyPath, String prefix) { MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); blobs.forEach((String blobName, ByteArrayOutputStream bos) -> { final String checkBlob; @@ -108,7 +114,7 @@ public Map listBlobsByPrefix(String account, LocationMode } @Override - public void writeBlob(String account, LocationMode mode, String container, String blobName, InputStream inputStream, long blobSize) + public void writeBlob(String account, String container, String blobName, InputStream inputStream, long blobSize) throws URISyntaxException, StorageException, FileAlreadyExistsException { if (blobs.containsKey(blobName)) { throw new 
FileAlreadyExistsException(blobName); @@ -168,4 +174,14 @@ public synchronized int read(byte[] b, int off, int len) { return super.read(b, off, len); } } + + @Override + public Tuple> client(String clientName) { + return null; + } + + @Override + public Map refreshAndClearCache(Map clientsSettings) { + return emptyMap(); + } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java index 447826dbf833f..3308db682fece 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories.azure; -import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.RetryExponentialRetry; import com.microsoft.azure.storage.blob.CloudBlobClient; import com.microsoft.azure.storage.core.Base64; @@ -28,6 +27,7 @@ import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.Proxy; @@ -35,7 +35,6 @@ import java.net.URISyntaxException; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; -import java.util.Collections; import java.util.Map; import static org.elasticsearch.repositories.azure.AzureStorageServiceImpl.blobNameFromUri; @@ -50,17 +49,10 @@ public class AzureStorageServiceTests extends ESTestCase { public void testReadSecuredSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("azure.client.azure1.account", "myaccount1"); - secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1")); - secureSettings.setString("azure.client.azure2.account", "myaccount2"); - secureSettings.setString("azure.client.azure2.key", encodeKey("mykey2")); - secureSettings.setString("azure.client.azure3.account", "myaccount3"); - secureSettings.setString("azure.client.azure3.key", encodeKey("mykey3")); - Settings settings = Settings.builder().setSecureSettings(secureSettings) + final Settings settings = Settings.builder().setSecureSettings(buildSecureSettings()) .put("azure.client.azure3.endpoint_suffix", "my_endpoint_suffix").build(); - Map loadedSettings = AzureStorageSettings.load(settings); + final Map loadedSettings = AzureStorageSettings.load(settings); assertThat(loadedSettings.keySet(), containsInAnyOrder("azure1","azure2","azure3","default")); assertThat(loadedSettings.get("azure1").getEndpointSuffix(), isEmptyString()); @@ -68,95 +60,161 @@ public void testReadSecuredSettings() { assertThat(loadedSettings.get("azure3").getEndpointSuffix(), equalTo("my_endpoint_suffix")); } - public void testCreateClientWithEndpointSuffix() { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("azure.client.azure1.account", "myaccount1"); - secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1")); - secureSettings.setString("azure.client.azure2.account", "myaccount2"); - secureSettings.setString("azure.client.azure2.key", encodeKey("mykey2")); - Settings settings = Settings.builder().setSecureSettings(secureSettings) + public void testCreateClientWithEndpointSuffix() throws IOException { + final Settings settings = 
Settings.builder().setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.endpoint_suffix", "my_endpoint_suffix").build(); - AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(settings, AzureStorageSettings.load(settings)); - CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); - assertThat(client1.getEndpoint().toString(), equalTo("https://myaccount1.blob.my_endpoint_suffix")); + try (AzureRepositoryPlugin plugin = new AzureRepositoryPlugin(settings)) { + final AzureStorageServiceImpl azureStorageService = (AzureStorageServiceImpl) plugin.azureStoreService; + final CloudBlobClient client1 = azureStorageService.client("azure1").v1(); + assertThat(client1.getEndpoint().toString(), equalTo("https://myaccount1.blob.my_endpoint_suffix")); + final CloudBlobClient client2 = azureStorageService.client("azure2").v1(); + assertThat(client2.getEndpoint().toString(), equalTo("https://myaccount2.blob.core.windows.net")); + } + } - CloudBlobClient client2 = azureStorageService.getSelectedClient("azure2", LocationMode.PRIMARY_ONLY); - assertThat(client2.getEndpoint().toString(), equalTo("https://myaccount2.blob.core.windows.net")); + public void testReinitClientSettings() throws IOException { + final MockSecureSettings secureSettings1 = new MockSecureSettings(); + secureSettings1.setString("azure.client.azure1.account", "myaccount11"); + secureSettings1.setString("azure.client.azure1.key", encodeKey("mykey11")); + secureSettings1.setString("azure.client.azure2.account", "myaccount12"); + secureSettings1.setString("azure.client.azure2.key", encodeKey("mykey12")); + final Settings settings1 = Settings.builder().setSecureSettings(secureSettings1).build(); + final MockSecureSettings secureSettings2 = new MockSecureSettings(); + secureSettings2.setString("azure.client.azure1.account", "myaccount21"); + secureSettings2.setString("azure.client.azure1.key", encodeKey("mykey21")); + secureSettings2.setString("azure.client.azure3.account", "myaccount23"); + secureSettings2.setString("azure.client.azure3.key", encodeKey("mykey23")); + final Settings settings2 = Settings.builder().setSecureSettings(secureSettings2).build(); + try (AzureRepositoryPlugin plugin = new AzureRepositoryPlugin(settings1)) { + final AzureStorageServiceImpl azureStorageService = (AzureStorageServiceImpl) plugin.azureStoreService; + final CloudBlobClient client11 = azureStorageService.client("azure1").v1(); + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount11.blob.core.windows.net")); + final CloudBlobClient client12 = azureStorageService.client("azure2").v1(); + assertThat(client12.getEndpoint().toString(), equalTo("https://myaccount12.blob.core.windows.net")); + // client 3 is missing + final SettingsException e1 = expectThrows(SettingsException.class, () -> azureStorageService.client("azure3")); + assertThat(e1.getMessage(), is("Unable to find client with name [azure3]")); + // update client settings + plugin.reload(settings2); + // old client 1 not changed + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount11.blob.core.windows.net")); + // new client 1 is changed + final CloudBlobClient client21 = azureStorageService.client("azure1").v1(); + assertThat(client21.getEndpoint().toString(), equalTo("https://myaccount21.blob.core.windows.net")); + // old client 2 not changed + assertThat(client12.getEndpoint().toString(), equalTo("https://myaccount12.blob.core.windows.net")); + // new client2 is gone + final 
SettingsException e2 = expectThrows(SettingsException.class, () -> azureStorageService.client("azure2")); + assertThat(e2.getMessage(), is("Unable to find client with name [azure2]")); + // client 3 emerged + final CloudBlobClient client23 = azureStorageService.client("azure3").v1(); + assertThat(client23.getEndpoint().toString(), equalTo("https://myaccount23.blob.core.windows.net")); + } } - public void testGetSelectedClientWithNoPrimaryAndSecondary() { - try { - new AzureStorageServiceImpl(Settings.EMPTY, Collections.emptyMap()); - fail("we should have raised an IllegalArgumentException"); - } catch (IllegalArgumentException e) { + public void testReinitClientEmptySettings() throws IOException { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("azure.client.azure1.account", "myaccount1"); + secureSettings.setString("azure.client.azure1.key", encodeKey("mykey11")); + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + try (AzureRepositoryPlugin plugin = new AzureRepositoryPlugin(settings)) { + final AzureStorageServiceImpl azureStorageService = (AzureStorageServiceImpl) plugin.azureStoreService; + final CloudBlobClient client11 = azureStorageService.client("azure1").v1(); + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount1.blob.core.windows.net")); + // reinit with empty settings + final SettingsException e = expectThrows(SettingsException.class, () -> plugin.reload(Settings.EMPTY)); assertThat(e.getMessage(), is("If you want to use an azure repository, you need to define a client configuration.")); + // existing client untouched + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount1.blob.core.windows.net")); + // new client also untouched + final CloudBlobClient client21 = azureStorageService.client("azure1").v1(); + assertThat(client21.getEndpoint().toString(), equalTo("https://myaccount1.blob.core.windows.net")); + } + } + + public void testReinitClientWrongSettings() throws IOException { + final MockSecureSettings secureSettings1 = new MockSecureSettings(); + secureSettings1.setString("azure.client.azure1.account", "myaccount1"); + secureSettings1.setString("azure.client.azure1.key", encodeKey("mykey11")); + final Settings settings1 = Settings.builder().setSecureSettings(secureSettings1).build(); + final MockSecureSettings secureSettings2 = new MockSecureSettings(); + secureSettings2.setString("azure.client.azure1.account", "myaccount1"); + // missing key + final Settings settings2 = Settings.builder().setSecureSettings(secureSettings2).build(); + try (AzureRepositoryPlugin plugin = new AzureRepositoryPlugin(settings1)) { + final AzureStorageServiceImpl azureStorageService = (AzureStorageServiceImpl) plugin.azureStoreService; + final CloudBlobClient client11 = azureStorageService.client("azure1").v1(); + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount1.blob.core.windows.net")); + plugin.reload(settings2); + // existing client untouched + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount1.blob.core.windows.net")); + final SettingsException e = expectThrows(SettingsException.class, () -> azureStorageService.client("azure1")); + assertThat(e.getMessage(), is("Invalid azure client settings with name [azure1]")); } } public void testGetSelectedClientNonExisting() { - AzureStorageServiceImpl azureStorageService = createAzureService(buildSettings()); - IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, () -> { - azureStorageService.getSelectedClient("azure4", LocationMode.PRIMARY_ONLY); - }); + final AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(buildSettings()); + final SettingsException e = expectThrows(SettingsException.class, () -> azureStorageService.client("azure4")); assertThat(e.getMessage(), is("Unable to find client with name [azure4]")); } public void testGetSelectedClientDefaultTimeout() { - Settings timeoutSettings = Settings.builder() + final Settings timeoutSettings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure3.timeout", "30s") .build(); - AzureStorageServiceImpl azureStorageService = createAzureService(timeoutSettings); - CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + final AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(timeoutSettings); + final CloudBlobClient client1 = azureStorageService.client("azure1").v1(); assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), nullValue()); - CloudBlobClient client3 = azureStorageService.getSelectedClient("azure3", LocationMode.PRIMARY_ONLY); + final CloudBlobClient client3 = azureStorageService.client("azure3").v1(); assertThat(client3.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(30 * 1000)); } public void testGetSelectedClientNoTimeout() { - AzureStorageServiceImpl azureStorageService = createAzureService(buildSettings()); - CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + final AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(buildSettings()); + final CloudBlobClient client1 = azureStorageService.client("azure1").v1(); assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(nullValue())); } public void testGetSelectedClientBackoffPolicy() { - AzureStorageServiceImpl azureStorageService = createAzureService(buildSettings()); - CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + final AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(buildSettings()); + final CloudBlobClient client1 = azureStorageService.client("azure1").v1(); assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), is(notNullValue())); assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), instanceOf(RetryExponentialRetry.class)); } public void testGetSelectedClientBackoffPolicyNbRetries() { - Settings timeoutSettings = Settings.builder() + final Settings timeoutSettings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.max_retries", 7) .build(); - AzureStorageServiceImpl azureStorageService = createAzureService(timeoutSettings); - CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + final AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(timeoutSettings); + final CloudBlobClient client1 = azureStorageService.client("azure1").v1(); assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), is(notNullValue())); assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), instanceOf(RetryExponentialRetry.class)); } public void testNoProxy() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .build(); - AzureStorageServiceImpl 
mock = createAzureService(settings); + final AzureStorageServiceImpl mock = new AzureStorageServiceImpl(settings); assertThat(mock.storageSettings.get("azure1").getProxy(), nullValue()); assertThat(mock.storageSettings.get("azure2").getProxy(), nullValue()); assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue()); } public void testProxyHttp() throws UnknownHostException { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.port", 8080) .put("azure.client.azure1.proxy.type", "http") .build(); - AzureStorageServiceImpl mock = createAzureService(settings); - Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); + final AzureStorageServiceImpl mock = new AzureStorageServiceImpl(settings); + final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); assertThat(azure1Proxy, notNullValue()); assertThat(azure1Proxy.type(), is(Proxy.Type.HTTP)); @@ -166,7 +224,7 @@ public void testProxyHttp() throws UnknownHostException { } public void testMultipleProxies() throws UnknownHostException { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.port", 8080) @@ -175,12 +233,12 @@ public void testMultipleProxies() throws UnknownHostException { .put("azure.client.azure2.proxy.port", 8081) .put("azure.client.azure2.proxy.type", "http") .build(); - AzureStorageServiceImpl mock = createAzureService(settings); - Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); + final AzureStorageServiceImpl mock = new AzureStorageServiceImpl(settings); + final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); assertThat(azure1Proxy, notNullValue()); assertThat(azure1Proxy.type(), is(Proxy.Type.HTTP)); assertThat(azure1Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080))); - Proxy azure2Proxy = mock.storageSettings.get("azure2").getProxy(); + final Proxy azure2Proxy = mock.storageSettings.get("azure2").getProxy(); assertThat(azure2Proxy, notNullValue()); assertThat(azure2Proxy.type(), is(Proxy.Type.HTTP)); assertThat(azure2Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8081))); @@ -188,14 +246,14 @@ public void testMultipleProxies() throws UnknownHostException { } public void testProxySocks() throws UnknownHostException { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.port", 8080) .put("azure.client.azure1.proxy.type", "socks") .build(); - AzureStorageServiceImpl mock = createAzureService(settings); - Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); + final AzureStorageServiceImpl mock = new AzureStorageServiceImpl(settings); + final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); assertThat(azure1Proxy, notNullValue()); assertThat(azure1Proxy.type(), is(Proxy.Type.SOCKS)); assertThat(azure1Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080))); @@ -204,47 +262,46 @@ public void testProxySocks() throws UnknownHostException { } public void testProxyNoHost() { - Settings settings = Settings.builder() + final Settings settings = 
Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.port", 8080) .put("azure.client.azure1.proxy.type", randomFrom("socks", "http")) .build(); - - SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings)); + final SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceImpl(settings)); assertEquals("Azure Proxy type has been set but proxy host or port is not defined.", e.getMessage()); } public void testProxyNoPort() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.type", randomFrom("socks", "http")) .build(); - SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings)); + final SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceImpl(settings)); assertEquals("Azure Proxy type has been set but proxy host or port is not defined.", e.getMessage()); } public void testProxyNoType() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.port", 8080) .build(); - SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings)); + final SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceImpl(settings)); assertEquals("Azure Proxy port or host have been set but proxy type is not defined.", e.getMessage()); } public void testProxyWrongHost() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.type", randomFrom("socks", "http")) .put("azure.client.azure1.proxy.host", "thisisnotavalidhostorwehavebeensuperunlucky") .put("azure.client.azure1.proxy.port", 8080) .build(); - SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings)); + final SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceImpl(settings)); assertEquals("Azure proxy host is unknown.", e.getMessage()); } @@ -260,7 +317,7 @@ public void testBlobNameFromUri() throws URISyntaxException { } private static MockSecureSettings buildSecureSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); + final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("azure.client.azure1.account", "myaccount1"); secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1")); secureSettings.setString("azure.client.azure2.account", "myaccount2"); @@ -274,10 +331,6 @@ private static Settings buildSettings() { return Settings.builder().setSecureSettings(buildSecureSettings()).build(); } - private static AzureStorageServiceImpl createAzureService(final Settings settings) { - return new AzureStorageServiceImpl(settings, AzureStorageSettings.load(settings)); - } - private static String encodeKey(final String value) { return Base64.encode(value.getBytes(StandardCharsets.UTF_8)); } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 78fd9461ad54d..c20b99790088e 100644 --- 
a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -64,18 +64,24 @@ class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore // https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload private static final int LARGE_BLOB_THRESHOLD_BYTE_SIZE = 5 * 1024 * 1024; - private final Storage storage; - private final String bucket; + private final String bucketName; + private final String clientName; + private final GoogleCloudStorageService storageService; - GoogleCloudStorageBlobStore(Settings settings, String bucket, Storage storage) { + GoogleCloudStorageBlobStore(Settings settings, String bucketName, String clientName, GoogleCloudStorageService storageService) { super(settings); - this.bucket = bucket; - this.storage = storage; - if (doesBucketExist(bucket) == false) { - throw new BlobStoreException("Bucket [" + bucket + "] does not exist"); + this.bucketName = bucketName; + this.clientName = clientName; + this.storageService = storageService; + if (doesBucketExist(bucketName) == false) { + throw new BlobStoreException("Bucket [" + bucketName + "] does not exist"); } } + private Storage client() throws IOException { + return storageService.client(clientName); + } + @Override public BlobContainer blobContainer(BlobPath path) { return new GoogleCloudStorageBlobContainer(path, this); @@ -91,14 +97,14 @@ public void close() { } /** - * Return true if the given bucket exists + * Return true iff the given bucket exists * * @param bucketName name of the bucket - * @return true if the bucket exists, false otherwise + * @return true iff the bucket exists */ boolean doesBucketExist(String bucketName) { try { - final Bucket bucket = SocketAccess.doPrivilegedIOException(() -> storage.get(bucketName)); + final Bucket bucket = SocketAccess.doPrivilegedIOException(() -> client().get(bucketName)); return bucket != null; } catch (final Exception e) { throw new BlobStoreException("Unable to check if bucket [" + bucketName + "] exists", e); @@ -106,10 +112,9 @@ boolean doesBucketExist(String bucketName) { } /** - * List blobs in the bucket under the specified path. The path root is removed. + * List blobs in the specific bucket under the specified path. The path root is removed. * - * @param path - * base path of the blobs to list + * @param path base path of the blobs to list * @return a map of blob names and their metadata */ Map listBlobs(String path) throws IOException { @@ -117,20 +122,19 @@ Map listBlobs(String path) throws IOException { } /** - * List all blobs in the bucket which have a prefix + * List all blobs in the specific bucket with names prefixed * * @param path * base path of the blobs to list. This path is removed from the * names of the blobs returned. - * @param prefix - * prefix of the blobs to list. + * @param prefix prefix of the blobs to list. * @return a map of blob names and their metadata. 
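A short illustrative sketch (not part of the patch) of the design change in this blob store: it now keeps only the bucket and client names and asks GoogleCloudStorageService for the Storage client on every call, so a later reload of the secure client settings is picked up by subsequent operations. The "default" client name and the service reference are assumptions for the example:

    import java.io.IOException;

    import com.google.cloud.storage.Blob;
    import com.google.cloud.storage.BlobId;
    import com.google.cloud.storage.Storage;

    import org.elasticsearch.repositories.gcs.GoogleCloudStorageService;

    class PerCallClientSketch {
        static boolean blobExists(GoogleCloudStorageService storageService, String bucketName, String blobName)
                throws IOException {
            // resolve the client on every call and never cache the returned Storage reference
            final Storage storage = storageService.client("default");
            final Blob blob = storage.get(BlobId.of(bucketName, blobName));
            return blob != null;
        }
    }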
*/ Map listBlobsByPrefix(String path, String prefix) throws IOException { final String pathPrefix = buildKey(path, prefix); final MapBuilder mapBuilder = MapBuilder.newMapBuilder(); SocketAccess.doPrivilegedVoidIOException(() -> { - storage.get(bucket).list(BlobListOption.prefix(pathPrefix)).iterateAll().forEach(blob -> { + client().get(bucketName).list(BlobListOption.prefix(pathPrefix)).iterateAll().forEach(blob -> { assert blob.getName().startsWith(path); final String suffixName = blob.getName().substring(path.length()); mapBuilder.put(suffixName, new PlainBlobMetaData(suffixName, blob.getSize())); @@ -140,26 +144,26 @@ Map listBlobsByPrefix(String path, String prefix) throws I } /** - * Returns true if the blob exists in the bucket + * Returns true if the blob exists in the specific bucket * * @param blobName name of the blob - * @return true if the blob exists, false otherwise + * @return true iff the blob exists */ boolean blobExists(String blobName) throws IOException { - final BlobId blobId = BlobId.of(bucket, blobName); - final Blob blob = SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); + final BlobId blobId = BlobId.of(bucketName, blobName); + final Blob blob = SocketAccess.doPrivilegedIOException(() -> client().get(blobId)); return blob != null; } /** - * Returns an {@link java.io.InputStream} for a given blob + * Returns an {@link java.io.InputStream} for the given blob name * * @param blobName name of the blob - * @return an InputStream + * @return the InputStream used to read the blob's content */ InputStream readBlob(String blobName) throws IOException { - final BlobId blobId = BlobId.of(bucket, blobName); - final Blob blob = SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); + final BlobId blobId = BlobId.of(bucketName, blobName); + final Blob blob = SocketAccess.doPrivilegedIOException(() -> client().get(blobId)); if (blob == null) { throw new NoSuchFileException("Blob [" + blobName + "] does not exit"); } @@ -184,13 +188,13 @@ public void close() throws IOException { } /** - * Writes a blob in the bucket. 
+ * Writes a blob in the specific bucket * * @param inputStream content of the blob to be written * @param blobSize expected size of the blob to be written */ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).build(); + final BlobInfo blobInfo = BlobInfo.newBuilder(bucketName, blobName).build(); if (blobSize > LARGE_BLOB_THRESHOLD_BYTE_SIZE) { writeBlobResumable(blobInfo, inputStream); } else { @@ -208,8 +212,8 @@ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws I */ private void writeBlobResumable(BlobInfo blobInfo, InputStream inputStream) throws IOException { try { - final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException( - () -> storage.writer(blobInfo, Storage.BlobWriteOption.doesNotExist())); + final WriteChannel writeChannel = SocketAccess + .doPrivilegedIOException(() -> client().writer(blobInfo, Storage.BlobWriteOption.doesNotExist())); Streams.copy(inputStream, Channels.newOutputStream(new WritableByteChannel() { @Override public boolean isOpen() { @@ -227,7 +231,7 @@ public int write(ByteBuffer src) throws IOException { return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); } })); - } catch (StorageException se) { + } catch (final StorageException se) { if (se.getCode() == HTTP_PRECON_FAILED) { throw new FileAlreadyExistsException(blobInfo.getBlobId().getName(), null, se.getMessage()); } @@ -249,45 +253,43 @@ private void writeBlobMultipart(BlobInfo blobInfo, InputStream inputStream, long assert blobSize <= LARGE_BLOB_THRESHOLD_BYTE_SIZE : "large blob uploads should use the resumable upload method"; final ByteArrayOutputStream baos = new ByteArrayOutputStream(Math.toIntExact(blobSize)); Streams.copy(inputStream, baos); - SocketAccess.doPrivilegedVoidIOException( - () -> { - try { - storage.create(blobInfo, baos.toByteArray(), Storage.BlobTargetOption.doesNotExist()); - } catch (StorageException se) { - if (se.getCode() == HTTP_PRECON_FAILED) { - throw new FileAlreadyExistsException(blobInfo.getBlobId().getName(), null, se.getMessage()); - } - throw se; - } - }); + try { + SocketAccess.doPrivilegedVoidIOException( + () -> client().create(blobInfo, baos.toByteArray(), Storage.BlobTargetOption.doesNotExist())); + } catch (final StorageException se) { + if (se.getCode() == HTTP_PRECON_FAILED) { + throw new FileAlreadyExistsException(blobInfo.getBlobId().getName(), null, se.getMessage()); + } + throw se; + } } /** - * Deletes a blob in the bucket + * Deletes the blob from the specific bucket * * @param blobName name of the blob */ void deleteBlob(String blobName) throws IOException { - final BlobId blobId = BlobId.of(bucket, blobName); - final boolean deleted = SocketAccess.doPrivilegedIOException(() -> storage.delete(blobId)); + final BlobId blobId = BlobId.of(bucketName, blobName); + final boolean deleted = SocketAccess.doPrivilegedIOException(() -> client().delete(blobId)); if (deleted == false) { throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); } } /** - * Deletes multiple blobs in the bucket that have a given prefix + * Deletes multiple blobs from the specific bucket all of which have prefixed names * - * @param prefix prefix of the buckets to delete + * @param prefix prefix of the blobs to delete */ void deleteBlobsByPrefix(String prefix) throws IOException { deleteBlobs(listBlobsByPrefix("", prefix).keySet()); } /** - * Deletes multiple blobs in the given bucket (uses a batch 
request to perform this) + * Deletes multiple blobs from the specific bucket using a batch request * - * @param blobNames names of the bucket to delete + * @param blobNames names of the blobs to delete */ void deleteBlobs(Collection blobNames) throws IOException { if (blobNames.isEmpty()) { @@ -298,13 +300,13 @@ void deleteBlobs(Collection blobNames) throws IOException { deleteBlob(blobNames.iterator().next()); return; } - final List blobIdsToDelete = blobNames.stream().map(blobName -> BlobId.of(bucket, blobName)).collect(Collectors.toList()); - final List deletedStatuses = SocketAccess.doPrivilegedIOException(() -> storage.delete(blobIdsToDelete)); + final List blobIdsToDelete = blobNames.stream().map(blob -> BlobId.of(bucketName, blob)).collect(Collectors.toList()); + final List deletedStatuses = SocketAccess.doPrivilegedIOException(() -> client().delete(blobIdsToDelete)); assert blobIdsToDelete.size() == deletedStatuses.size(); boolean failed = false; for (int i = 0; i < blobIdsToDelete.size(); i++) { if (deletedStatuses.get(i) == false) { - logger.error("Failed to delete blob [{}] in bucket [{}]", blobIdsToDelete.get(i).getName(), bucket); + logger.error("Failed to delete blob [{}] in bucket [{}]", blobIdsToDelete.get(i).getName(), bucketName); failed = true; } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index 1d2d70584adf9..12e7fd26ff565 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -24,35 +24,34 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; - import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; -public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin { +public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin, ReloadablePlugin { - private final Map clientsSettings; + // package-private for tests + final GoogleCloudStorageService storageService; public GoogleCloudStoragePlugin(final Settings settings) { - clientsSettings = GoogleCloudStorageClientSettings.load(settings); - } - - protected Map getClientsSettings() { - return clientsSettings; + this.storageService = createStorageService(settings); + // eagerly load client settings so that secure settings are readable (not closed) + reload(settings); } // overridable for tests - protected GoogleCloudStorageService createStorageService(Environment environment) { - return new GoogleCloudStorageService(environment, clientsSettings); + protected GoogleCloudStorageService createStorageService(Settings settings) { + return new GoogleCloudStorageService(settings); } @Override public Map getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) { return Collections.singletonMap(GoogleCloudStorageRepository.TYPE, - (metadata) -> new GoogleCloudStorageRepository(metadata, env, namedXContentRegistry, createStorageService(env))); + (metadata) -> new GoogleCloudStorageRepository(metadata, env, namedXContentRegistry, 
this.storageService)); } @Override @@ -66,4 +65,15 @@ public List> getSettings() { GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING, GoogleCloudStorageClientSettings.TOKEN_URI_SETTING); } + + @Override + public void reload(Settings settings) { + // Secure settings should be readable inside this method. Duplicate client + // settings in a format (`GoogleCloudStorageClientSettings`) that does not + // require for the `SecureSettings` to be open. Pass that around (the + // `GoogleCloudStorageClientSettings` instance) instead of the `Settings` + // instance. + final Map clientsSettings = GoogleCloudStorageClientSettings.load(settings); + this.storageService.refreshAndClearCache(clientsSettings); + } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index 976befae0a269..83d48eeda20aa 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -38,8 +38,6 @@ import static org.elasticsearch.common.settings.Setting.byteSizeSetting; import static org.elasticsearch.common.settings.Setting.simpleString; -import com.google.cloud.storage.Storage; - class GoogleCloudStorageRepository extends BlobStoreRepository { // package private for testing @@ -86,8 +84,7 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}]", bucket, basePath, chunkSize, compress); - Storage client = SocketAccess.doPrivilegedIOException(() -> storageService.createClient(clientName)); - this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, client); + this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, clientName, storageService); } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 57bcc4b131356..b24674da174c3 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -28,11 +28,13 @@ import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageOptions; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.env.Environment; +import org.elasticsearch.common.util.LazyInitializable; import java.io.IOException; import java.net.HttpURLConnection; @@ -40,30 +42,74 @@ import java.net.URISyntaxException; import java.net.URL; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; + +import static java.util.Collections.emptyMap; public class GoogleCloudStorageService extends AbstractComponent { - /** Clients settings identified by client name. */ - private final Map clientsSettings; + /** + * Dictionary of client instances. Client instances are built lazily from the + * latest settings. 
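The cache introduced here follows a refresh-and-clear pattern: clients are built lazily per client name from the most recently loaded settings, and a reload swaps the whole map atomically so new requests see new clients while the old LazyInitializable wrappers are reset. A simplified, self-contained sketch of that pattern, with plain Supplier memoization standing in for LazyInitializable and all names illustrative:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.atomic.AtomicReference;
    import java.util.function.Supplier;

    class LazyClientCacheSketch<C> {
        private final AtomicReference<Map<String, Supplier<C>>> cache = new AtomicReference<>(new HashMap<>());

        // called on reload with freshly built client factories (the real service parses settings here)
        synchronized void refreshAndClearCache(Map<String, Supplier<C>> factories) {
            final Map<String, Supplier<C>> fresh = new HashMap<>();
            factories.forEach((name, factory) -> fresh.put(name, memoize(factory)));
            cache.set(fresh); // atomic swap: later lookups only see the new, lazily built clients
        }

        // called per operation; callers must not hold on to the returned client
        C client(String name) {
            final Supplier<C> lazy = cache.get().get(name);
            if (lazy == null) {
                throw new IllegalArgumentException("Unknown client name [" + name + "]");
            }
            return lazy.get();
        }

        // build the client at most once, on first use
        private static <T> Supplier<T> memoize(Supplier<T> factory) {
            return new Supplier<T>() {
                private T value;

                @Override
                public synchronized T get() {
                    if (value == null) {
                        value = factory.get();
                    }
                    return value;
                }
            };
        }
    }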
+ */ + private final AtomicReference>> clientsCache = new AtomicReference<>(emptyMap()); - public GoogleCloudStorageService(final Environment environment, final Map clientsSettings) { - super(environment.settings()); - this.clientsSettings = clientsSettings; + public GoogleCloudStorageService(final Settings settings) { + super(settings); } /** - * Creates a client that can be used to manage Google Cloud Storage objects. + * Refreshes the client settings and clears the client cache. Subsequent calls to + * {@code GoogleCloudStorageService#client} will return new clients constructed + * using the parameter settings. * - * @param clientName name of client settings to use, including secure settings - * @return a Client instance that can be used to manage Storage objects + * @param clientsSettings the new settings used for building clients for subsequent requests + */ + public synchronized void refreshAndClearCache(Map clientsSettings) { + // build the new lazy clients + final MapBuilder> newClientsCache = MapBuilder.newMapBuilder(); + for (final Map.Entry entry : clientsSettings.entrySet()) { + newClientsCache.put(entry.getKey(), + new LazyInitializable(() -> createClient(entry.getKey(), entry.getValue()))); + } + // make the new clients available + final Map> oldClientCache = clientsCache.getAndSet(newClientsCache.immutableMap()); + // release old clients + oldClientCache.values().forEach(LazyInitializable::reset); + } + + /** + * Attempts to retrieve a client from the cache. If the client does not exist it + * will be created from the latest settings and will populate the cache. The + * returned instance should not be cached by the calling code. Instead, for each + * use, the (possibly updated) instance should be requested by calling this + * method. + * + * @param clientName name of the client settings used to create the client + * @return a cached client storage instance that can be used to manage objects + * (blobs) */ - public Storage createClient(final String clientName) throws Exception { - final GoogleCloudStorageClientSettings clientSettings = clientsSettings.get(clientName); - if (clientSettings == null) { + public Storage client(final String clientName) throws IOException { + final LazyInitializable lazyClient = clientsCache.get().get(clientName); + if (lazyClient == null) { throw new IllegalArgumentException("Unknown client name [" + clientName + "]. Existing client configs: " - + Strings.collectionToDelimitedString(clientsSettings.keySet(), ",")); + + Strings.collectionToDelimitedString(clientsCache.get().keySet(), ",")); } - final HttpTransport httpTransport = createHttpTransport(clientSettings.getHost()); + return lazyClient.getOrCompute(); + } + + /** + * Creates a client that can be used to manage Google Cloud Storage objects. The client is thread-safe. 
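A caller-side sketch of the lookup-per-use contract described above may help; it is illustrative only (the client name, bucket and blob are assumed), and mirrors how GoogleCloudStorageBlobStore resolves its client for every operation instead of holding on to a Storage instance:

    // sketch only: ask the service for the (possibly refreshed) client on every use
    final Storage storage = storageService.client("gcs_client");
    // use it immediately; do not cache the Storage instance in the caller
    final boolean deleted = storage.delete(BlobId.of("bucket", "path/to/blob"));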
+ * + * @param clientName name of client settings to use, including secure settings + * @param clientSettings name of client settings to use, including secure settings + * @return a new client storage instance that can be used to manage objects + * (blobs) + */ + private Storage createClient(final String clientName, final GoogleCloudStorageClientSettings clientSettings) throws IOException { + logger.debug(() -> new ParameterizedMessage("creating GCS client with client_name [{}], endpoint [{}]", clientName, + clientSettings.getHost())); + final HttpTransport httpTransport = SocketAccess.doPrivilegedIOException(() -> createHttpTransport(clientSettings.getHost())); final HttpTransportOptions httpTransportOptions = HttpTransportOptions.newBuilder() .setConnectTimeout(toTimeout(clientSettings.getConnectTimeout())) .setReadTimeout(toTimeout(clientSettings.getReadTimeout())) @@ -114,6 +160,9 @@ private static HttpTransport createHttpTransport(final String endpoint) throws E builder.trustCertificates(GoogleUtils.getCertificateTrustStore()); if (Strings.hasLength(endpoint)) { final URL endpointUrl = URI.create(endpoint).toURL(); + // it is crucial to open a connection for each URL (see {@code + // DefaultConnectionFactory#openConnection}) instead of reusing connections, + // because the storage instance has to be thread-safe as it is cached. builder.setConnectionFactory(new DefaultConnectionFactory() { @Override public HttpURLConnection openConnection(final URL originalUrl) throws IOException { diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java index 27736e24dbf51..0cc1243f28311 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java @@ -26,11 +26,22 @@ import java.util.Locale; import java.util.concurrent.ConcurrentHashMap; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class GoogleCloudStorageBlobStoreContainerTests extends ESBlobStoreContainerTestCase { @Override protected BlobStore newBlobStore() { - String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, new MockStorage(bucket, new ConcurrentHashMap<>())); + final String bucketName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final String clientName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final GoogleCloudStorageService storageService = mock(GoogleCloudStorageService.class); + try { + when(storageService.client(any(String.class))).thenReturn(new MockStorage(bucketName, new ConcurrentHashMap<>())); + } catch (final Exception e) { + throw new RuntimeException(e); + } + return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucketName, clientName, storageService); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index c4d9b67899672..3692b26f2bbb7 100644 --- 
a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -24,14 +24,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase; import org.junit.AfterClass; import java.util.Collection; import java.util.Collections; -import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -73,19 +71,19 @@ public MockGoogleCloudStoragePlugin(final Settings settings) { } @Override - protected GoogleCloudStorageService createStorageService(Environment environment) { - return new MockGoogleCloudStorageService(environment, getClientsSettings()); + protected GoogleCloudStorageService createStorageService(Settings settings) { + return new MockGoogleCloudStorageService(settings); } } public static class MockGoogleCloudStorageService extends GoogleCloudStorageService { - MockGoogleCloudStorageService(Environment environment, Map clientsSettings) { - super(environment, clientsSettings); + MockGoogleCloudStorageService(Settings settings) { + super(settings); } @Override - public Storage createClient(String clientName) { + public Storage client(String clientName) { return new MockStorage(BUCKET, blobs); } } @@ -97,7 +95,7 @@ public void testChunkSize() { assertEquals(GoogleCloudStorageRepository.MAX_CHUNK_SIZE, chunkSize); // chunk size in settings - int size = randomIntBetween(1, 100); + final int size = randomIntBetween(1, 100); repositoryMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, Settings.builder().put("chunk_size", size + "mb").build()); chunkSize = GoogleCloudStorageRepository.getSetting(GoogleCloudStorageRepository.CHUNK_SIZE, repositoryMetaData); @@ -105,7 +103,7 @@ public void testChunkSize() { // zero bytes is not allowed IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, + final RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, Settings.builder().put("chunk_size", "0").build()); GoogleCloudStorageRepository.getSetting(GoogleCloudStorageRepository.CHUNK_SIZE, repoMetaData); }); @@ -113,7 +111,7 @@ public void testChunkSize() { // negative bytes not allowed e = expectThrows(IllegalArgumentException.class, () -> { - RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, + final RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, Settings.builder().put("chunk_size", "-1").build()); GoogleCloudStorageRepository.getSetting(GoogleCloudStorageRepository.CHUNK_SIZE, repoMetaData); }); @@ -121,7 +119,7 @@ public void testChunkSize() { // greater than max chunk size not allowed e = expectThrows(IllegalArgumentException.class, () -> { - RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, + final RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, Settings.builder().put("chunk_size", "101mb").build()); 
GoogleCloudStorageRepository.getSetting(GoogleCloudStorageRepository.CHUNK_SIZE, repoMetaData); }); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java index 5e25307805235..4634bd3274a70 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java @@ -26,11 +26,22 @@ import java.util.Locale; import java.util.concurrent.ConcurrentHashMap; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class GoogleCloudStorageBlobStoreTests extends ESBlobStoreTestCase { @Override protected BlobStore newBlobStore() { - String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, new MockStorage(bucket, new ConcurrentHashMap<>())); + final String bucketName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final String clientName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final GoogleCloudStorageService storageService = mock(GoogleCloudStorageService.class); + try { + when(storageService.client(any(String.class))).thenReturn(new MockStorage(bucketName, new ConcurrentHashMap<>())); + } catch (final Exception e) { + throw new RuntimeException(e); + } + return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucketName, clientName, storageService); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index a33ae90c549bc..0130d2c576cd5 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -23,28 +23,36 @@ import com.google.cloud.http.HttpTransportOptions; import com.google.cloud.storage.Storage; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.env.Environment; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; -import java.util.Collections; + +import java.security.KeyPair; +import java.security.KeyPairGenerator; +import java.util.Base64; import java.util.Locale; +import java.util.UUID; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.containsString; public class GoogleCloudStorageServiceTests extends ESTestCase { public void testClientInitializer() throws Exception { - final String clientName = randomAlphaOfLength(4).toLowerCase(Locale.ROOT); - final Environment environment = mock(Environment.class); + final String clientName = randomAlphaOfLength(randomIntBetween(1, 
10)).toLowerCase(Locale.ROOT); final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); - final String applicationName = randomAlphaOfLength(4); - final String hostName = randomFrom("http://", "https://") + randomAlphaOfLength(4) + ":" + randomIntBetween(1, 65535); - final String projectIdName = randomAlphaOfLength(4); + final String applicationName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final String endpoint = randomFrom("http://", "https://") + + randomFrom("www.elastic.co", "www.googleapis.com", "localhost/api", "google.com/oauth") + + ":" + randomIntBetween(1, 65535); + final String projectIdName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); final Settings settings = Settings.builder() .put(GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), connectTimeValue.getStringRep()) @@ -52,20 +60,18 @@ public void testClientInitializer() throws Exception { readTimeValue.getStringRep()) .put(GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), applicationName) - .put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), hostName) + .put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint) .put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName) .build(); - when(environment.settings()).thenReturn(settings); - final GoogleCloudStorageClientSettings clientSettings = GoogleCloudStorageClientSettings.getClientSettings(settings, clientName); - final GoogleCloudStorageService service = new GoogleCloudStorageService(environment, - Collections.singletonMap(clientName, clientSettings)); - final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.createClient("another_client")); + final GoogleCloudStorageService service = new GoogleCloudStorageService(settings); + service.refreshAndClearCache(GoogleCloudStorageClientSettings.load(settings)); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.client("another_client")); assertThat(e.getMessage(), Matchers.startsWith("Unknown client name")); assertSettingDeprecationsAndWarnings( new Setting[] { GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName) }); - final Storage storage = service.createClient(clientName); + final Storage storage = service.client(clientName); assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName)); - assertThat(storage.getOptions().getHost(), Matchers.is(hostName)); + assertThat(storage.getOptions().getHost(), Matchers.is(endpoint)); assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName)); assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class)); assertThat(((HttpTransportOptions) storage.getOptions().getTransportOptions()).getConnectTimeout(), @@ -75,6 +81,58 @@ public void testClientInitializer() throws Exception { assertThat(storage.getOptions().getCredentials(), Matchers.nullValue(Credentials.class)); } + public void testReinitClientSettings() throws Exception { + final MockSecureSettings secureSettings1 = new MockSecureSettings(); + 
secureSettings1.setFile("gcs.client.gcs1.credentials_file", serviceAccountFileContent("project_gcs11")); + secureSettings1.setFile("gcs.client.gcs2.credentials_file", serviceAccountFileContent("project_gcs12")); + final Settings settings1 = Settings.builder().setSecureSettings(secureSettings1).build(); + final MockSecureSettings secureSettings2 = new MockSecureSettings(); + secureSettings2.setFile("gcs.client.gcs1.credentials_file", serviceAccountFileContent("project_gcs21")); + secureSettings2.setFile("gcs.client.gcs3.credentials_file", serviceAccountFileContent("project_gcs23")); + final Settings settings2 = Settings.builder().setSecureSettings(secureSettings2).build(); + try (GoogleCloudStoragePlugin plugin = new GoogleCloudStoragePlugin(settings1)) { + final GoogleCloudStorageService storageService = plugin.storageService; + final Storage client11 = storageService.client("gcs1"); + assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); + final Storage client12 = storageService.client("gcs2"); + assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); + // client 3 is missing + final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> storageService.client("gcs3")); + assertThat(e1.getMessage(), containsString("Unknown client name [gcs3].")); + // update client settings + plugin.reload(settings2); + // old client 1 not changed + assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); + // new client 1 is changed + final Storage client21 = storageService.client("gcs1"); + assertThat(client21.getOptions().getProjectId(), equalTo("project_gcs21")); + // old client 2 not changed + assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); + // new client2 is gone + final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, () -> storageService.client("gcs2")); + assertThat(e2.getMessage(), containsString("Unknown client name [gcs2].")); + // client 3 emerged + final Storage client23 = storageService.client("gcs3"); + assertThat(client23.getOptions().getProjectId(), equalTo("project_gcs23")); + } + } + + private byte[] serviceAccountFileContent(String projectId) throws Exception { + final KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA"); + keyPairGenerator.initialize(1024); + final KeyPair keyPair = keyPairGenerator.generateKeyPair(); + final String encodedKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); + final XContentBuilder serviceAccountBuilder = jsonBuilder().startObject() + .field("type", "service_account") + .field("project_id", projectId) + .field("private_key_id", UUID.randomUUID().toString()) + .field("private_key", "-----BEGIN PRIVATE KEY-----\n" + encodedKey + "\n-----END PRIVATE KEY-----\n") + .field("client_email", "integration_test@appspot.gserviceaccount.com") + .field("client_id", "client_id") + .endObject(); + return BytesReference.toBytes(BytesReference.bytes(serviceAccountBuilder)); + } + public void testToTimeout() { assertEquals(-1, GoogleCloudStorageService.toTimeout(null).intValue()); assertEquals(-1, GoogleCloudStorageService.toTimeout(TimeValue.ZERO).intValue()); diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 23252881cd75f..8448b2ab9e1ac 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -55,7 +55,7 @@ bundlePlugin { } additionalTest('testRepositoryCreds'){ - include 
'**/RepositorySettingsCredentialsTests.class' + include '**/RepositoryCredentialsTests.class' systemProperty 'es.allow_insecure_settings', 'true' } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AmazonS3Reference.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AmazonS3Reference.java new file mode 100644 index 0000000000000..6734fcfb56df5 --- /dev/null +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AmazonS3Reference.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories.s3; + +import org.elasticsearch.common.util.concurrent.AbstractRefCounted; + +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3Client; + +import org.elasticsearch.common.lease.Releasable; + +/** + * Handles the shutdown of the wrapped {@link AmazonS3Client} using reference + * counting. + */ +public class AmazonS3Reference extends AbstractRefCounted implements Releasable { + + private final AmazonS3 client; + + AmazonS3Reference(AmazonS3 client) { + super("AWS_S3_CLIENT"); + this.client = client; + } + + /** + * Call when the client is not needed anymore. + */ + @Override + public void close() { + decRef(); + } + + /** + * Returns the underlying `AmazonS3` client. All method calls are permitted BUT + * NOT shutdown. Shutdown is called when reference count reaches 0. + */ + public AmazonS3 client() { + return client; + } + + @Override + protected void closeInternal() { + client.shutdown(); + } + +} \ No newline at end of file diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java index dbffe293a43b1..03b06c5b1bd34 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java @@ -19,14 +19,25 @@ package org.elasticsearch.repositories.s3; -import com.amazonaws.services.s3.AmazonS3; -import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.settings.Settings; +import java.io.Closeable; +import java.util.Map; -interface AwsS3Service extends LifecycleComponent { +interface AwsS3Service extends Closeable { /** - * Creates an {@code AmazonS3} client from the given repository metadata and node settings. + * Creates then caches an {@code AmazonS3} client using the current client + * settings. Returns an {@code AmazonS3Reference} wrapper which has to be + * released as soon as it is not needed anymore. 
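A brief illustrative sketch of the acquire/use/release contract described above, following the try-with-resources pattern the updated S3BlobContainer relies on (blobStore, bucket and key are assumed names):

    // sketch only: close() releases the reference; the wrapped client is shut down
    // once the cache has dropped it and all outstanding references are closed
    try (AmazonS3Reference clientReference = blobStore.clientReference()) {
        clientReference.client().doesObjectExist("bucket", "path/to/key");
    }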
*/ - AmazonS3 client(Settings repositorySettings); + AmazonS3Reference client(String clientName); + + /** + * Updates settings for building clients and clears the client cache. Future + * client requests will use the new settings to lazily build new clients. + * + * @param clientsSettings the new refreshed settings + * @return the old stale settings + */ + Map refreshAndClearCache(Map clientsSettings); + } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java index d70ed9ea9aa8b..a54320f1fbd19 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java @@ -28,66 +28,91 @@ import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; + import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; -import java.util.HashMap; +import java.io.IOException; import java.util.Map; -import java.util.function.Function; - +import static java.util.Collections.emptyMap; -class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Service { - // pkg private for tests - static final Setting CLIENT_NAME = new Setting<>("client", "default", Function.identity()); +class InternalAwsS3Service extends AbstractComponent implements AwsS3Service { - private final Map clientsSettings; + private volatile Map clientsCache = emptyMap(); + private volatile Map clientsSettings = emptyMap(); - private final Map clientsCache = new HashMap<>(); - - InternalAwsS3Service(Settings settings, Map clientsSettings) { + InternalAwsS3Service(Settings settings) { super(settings); - this.clientsSettings = clientsSettings; } + /** + * Refreshes the settings for the AmazonS3 clients and clears the cache of + * existing clients. New clients will be built using these new settings. Old + * clients are usable until released. On release they will be destroyed instead + * of being returned to the cache. 
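A minimal sketch of how a settings reload is expected to drive this method, assuming s3Service refers to the InternalAwsS3Service instance and settings to the reloaded node settings; clients already checked out keep working until their references are released:

    // sketch only: rebuild the per-client settings and swap them in, clearing the cache
    final Map<String, S3ClientSettings> reloadedSettings = S3ClientSettings.load(settings);
    s3Service.refreshAndClearCache(reloadedSettings);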
+ */ @Override - public synchronized AmazonS3 client(Settings repositorySettings) { - String clientName = CLIENT_NAME.get(repositorySettings); - AmazonS3Client client = clientsCache.get(clientName); - if (client != null) { - return client; - } + public synchronized Map refreshAndClearCache(Map clientsSettings) { + // shutdown all unused clients + // others will shutdown on their respective release + releaseCachedClients(); + final Map prevSettings = this.clientsSettings; + this.clientsSettings = MapBuilder.newMapBuilder(clientsSettings).immutableMap(); + assert this.clientsSettings.containsKey("default") : "always at least have 'default'"; + // clients are built lazily by {@link client(String)} + return prevSettings; + } - S3ClientSettings clientSettings = clientsSettings.get(clientName); - if (clientSettings == null) { - throw new IllegalArgumentException("Unknown s3 client name [" + clientName + "]. Existing client configs: " + - Strings.collectionToDelimitedString(clientsSettings.keySet(), ",")); + /** + * Attempts to retrieve a client by name from the cache. If the client does not + * exist it will be created. + */ + @Override + public AmazonS3Reference client(String clientName) { + AmazonS3Reference clientReference = clientsCache.get(clientName); + if ((clientReference != null) && clientReference.tryIncRef()) { + return clientReference; } + synchronized (this) { + clientReference = clientsCache.get(clientName); + if ((clientReference != null) && clientReference.tryIncRef()) { + return clientReference; + } + final S3ClientSettings clientSettings = clientsSettings.get(clientName); + if (clientSettings == null) { + throw new IllegalArgumentException("Unknown s3 client name [" + clientName + "]. Existing client configs: " + + Strings.collectionToDelimitedString(clientsSettings.keySet(), ",")); + } + logger.debug("creating S3 client with client_name [{}], endpoint [{}]", clientName, clientSettings.endpoint); + clientReference = new AmazonS3Reference(buildClient(clientSettings)); + clientReference.incRef(); + clientsCache = MapBuilder.newMapBuilder(clientsCache).put(clientName, clientReference).immutableMap(); + return clientReference; + } + } - logger.debug("creating S3 client with client_name [{}], endpoint [{}]", clientName, clientSettings.endpoint); - - AWSCredentialsProvider credentials = buildCredentials(logger, deprecationLogger, clientSettings, repositorySettings); - ClientConfiguration configuration = buildConfiguration(clientSettings); - - client = new AmazonS3Client(credentials, configuration); - + private AmazonS3 buildClient(S3ClientSettings clientSettings) { + final AWSCredentialsProvider credentials = buildCredentials(logger, clientSettings); + final ClientConfiguration configuration = buildConfiguration(clientSettings); + final AmazonS3 client = buildClient(credentials, configuration); if (Strings.hasText(clientSettings.endpoint)) { client.setEndpoint(clientSettings.endpoint); } - - clientsCache.put(clientName, client); return client; } + // proxy for testing + AmazonS3 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) { + return new AmazonS3Client(credentials, configuration); + } + // pkg private for tests static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { - ClientConfiguration clientConfiguration = new ClientConfiguration(); + final ClientConfiguration clientConfiguration = new ClientConfiguration(); // the response metadata cache is only there for diagnostics purposes, // but can force objects from every 
response to the old generation. clientConfiguration.setResponseMetadataCacheSize(0); @@ -109,27 +134,8 @@ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { } // pkg private for tests - static AWSCredentialsProvider buildCredentials(Logger logger, DeprecationLogger deprecationLogger, - S3ClientSettings clientSettings, Settings repositorySettings) { - - - BasicAWSCredentials credentials = clientSettings.credentials; - if (S3Repository.ACCESS_KEY_SETTING.exists(repositorySettings)) { - if (S3Repository.SECRET_KEY_SETTING.exists(repositorySettings) == false) { - throw new IllegalArgumentException("Repository setting [" + S3Repository.ACCESS_KEY_SETTING.getKey() + - " must be accompanied by setting [" + S3Repository.SECRET_KEY_SETTING.getKey() + "]"); - } - try (SecureString key = S3Repository.ACCESS_KEY_SETTING.get(repositorySettings); - SecureString secret = S3Repository.SECRET_KEY_SETTING.get(repositorySettings)) { - credentials = new BasicAWSCredentials(key.toString(), secret.toString()); - } - // backcompat for reading keys out of repository settings - deprecationLogger.deprecated("Using s3 access/secret key from repository settings. Instead " + - "store these in named clients and the elasticsearch keystore for secure settings."); - } else if (S3Repository.SECRET_KEY_SETTING.exists(repositorySettings)) { - throw new IllegalArgumentException("Repository setting [" + S3Repository.SECRET_KEY_SETTING.getKey() + - " must be accompanied by setting [" + S3Repository.ACCESS_KEY_SETTING.getKey() + "]"); - } + static AWSCredentialsProvider buildCredentials(Logger logger, S3ClientSettings clientSettings) { + final BasicAWSCredentials credentials = clientSettings.credentials; if (credentials == null) { logger.debug("Using instance profile credentials"); return new PrivilegedInstanceProfileCredentialsProvider(); @@ -139,21 +145,15 @@ static AWSCredentialsProvider buildCredentials(Logger logger, DeprecationLogger } } - @Override - protected void doStart() throws ElasticsearchException { - } - - @Override - protected void doStop() throws ElasticsearchException { - } - - @Override - protected void doClose() throws ElasticsearchException { - for (AmazonS3Client client : clientsCache.values()) { - client.shutdown(); + protected synchronized void releaseCachedClients() { + // the clients will shutdown when they will not be used anymore + for (final AmazonS3Reference clientReference : clientsCache.values()) { + clientReference.decRef(); } - - // Ensure that IdleConnectionReaper is shutdown + // clear previously cached clients, they will be build lazily + clientsCache = emptyMap(); + // shutdown IdleConnectionReaper background thread + // it will be restarted on new client usage IdleConnectionReaper.shutdown(); } @@ -174,4 +174,10 @@ public void refresh() { SocketAccess.doPrivilegedVoid(credentials::refresh); } } + + @Override + public void close() throws IOException { + releaseCachedClients(); + } + } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 222802ae30437..102af6f9f5b4c 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -20,7 +20,6 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.AmazonClientException; -import com.amazonaws.services.s3.AmazonS3; import 
com.amazonaws.services.s3.model.AbortMultipartUploadRequest; import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; @@ -47,8 +46,6 @@ import java.io.IOException; import java.io.InputStream; import java.nio.file.NoSuchFileException; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -70,19 +67,20 @@ class S3BlobContainer extends AbstractBlobContainer { @Override public boolean blobExists(String blobName) { - try { - return SocketAccess.doPrivileged(() -> blobStore.client().doesObjectExist(blobStore.bucket(), buildKey(blobName))); - } catch (Exception e) { + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + return SocketAccess.doPrivileged(() -> clientReference.client().doesObjectExist(blobStore.bucket(), buildKey(blobName))); + } catch (final Exception e) { throw new BlobStoreException("Failed to check if blob [" + blobName +"] exists", e); } } @Override public InputStream readBlob(String blobName) throws IOException { - try { - S3Object s3Object = SocketAccess.doPrivileged(() -> blobStore.client().getObject(blobStore.bucket(), buildKey(blobName))); + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(blobStore.bucket(), + buildKey(blobName))); return s3Object.getObjectContent(); - } catch (AmazonClientException e) { + } catch (final AmazonClientException e) { if (e instanceof AmazonS3Exception) { if (404 == ((AmazonS3Exception) e).getStatusCode()) { throw new NoSuchFileException("Blob object [" + blobName + "] not found: " + e.getMessage()); @@ -110,44 +108,45 @@ public void deleteBlob(String blobName) throws IOException { throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); } - try { - SocketAccess.doPrivilegedVoid(() -> blobStore.client().deleteObject(blobStore.bucket(), buildKey(blobName))); - } catch (AmazonClientException e) { + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + SocketAccess.doPrivilegedVoid(() -> clientReference.client().deleteObject(blobStore.bucket(), buildKey(blobName))); + } catch (final AmazonClientException e) { throw new IOException("Exception when deleting blob [" + blobName + "]", e); } } @Override public Map listBlobsByPrefix(@Nullable String blobNamePrefix) throws IOException { - return AccessController.doPrivileged((PrivilegedAction>) () -> { - MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); - AmazonS3 client = blobStore.client(); - SocketAccess.doPrivilegedVoid(() -> { - ObjectListing prevListing = null; - while (true) { - ObjectListing list; - if (prevListing != null) { - list = client.listNextBatchOfObjects(prevListing); - } else { - if (blobNamePrefix != null) { - list = client.listObjects(blobStore.bucket(), buildKey(blobNamePrefix)); - } else { - list = client.listObjects(blobStore.bucket(), keyPath); - } - } - for (S3ObjectSummary summary : list.getObjectSummaries()) { - String name = summary.getKey().substring(keyPath.length()); - blobsBuilder.put(name, new PlainBlobMetaData(name, summary.getSize())); - } - if (list.isTruncated()) { - prevListing = list; + final MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + ObjectListing prevListing = null; + while (true) { + ObjectListing list; + if (prevListing != 
null) { + final ObjectListing finalPrevListing = prevListing; + list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); + } else { + if (blobNamePrefix != null) { + list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(blobStore.bucket(), + buildKey(blobNamePrefix))); } else { - break; + list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(blobStore.bucket(), keyPath)); } } - }); + for (final S3ObjectSummary summary : list.getObjectSummaries()) { + final String name = summary.getKey().substring(keyPath.length()); + blobsBuilder.put(name, new PlainBlobMetaData(name, summary.getSize())); + } + if (list.isTruncated()) { + prevListing = list; + } else { + break; + } + } return blobsBuilder.immutableMap(); - }); + } catch (final AmazonClientException e) { + throw new IOException("Exception when listing blobs by prefix [" + blobNamePrefix + "]", e); + } } @Override @@ -175,19 +174,20 @@ void executeSingleUpload(final S3BlobStore blobStore, throw new IllegalArgumentException("Upload request size [" + blobSize + "] can't be larger than buffer size"); } - try { - final ObjectMetadata md = new ObjectMetadata(); - md.setContentLength(blobSize); - if (blobStore.serverSideEncryption()) { - md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); - } - - final PutObjectRequest putRequest = new PutObjectRequest(blobStore.bucket(), blobName, input, md); - putRequest.setStorageClass(blobStore.getStorageClass()); - putRequest.setCannedAcl(blobStore.getCannedACL()); + final ObjectMetadata md = new ObjectMetadata(); + md.setContentLength(blobSize); + if (blobStore.serverSideEncryption()) { + md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); + } + final PutObjectRequest putRequest = new PutObjectRequest(blobStore.bucket(), blobName, input, md); + putRequest.setStorageClass(blobStore.getStorageClass()); + putRequest.setCannedAcl(blobStore.getCannedACL()); - blobStore.client().putObject(putRequest); - } catch (AmazonClientException e) { + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + SocketAccess.doPrivilegedVoid(() -> { + clientReference.client().putObject(putRequest); + }); + } catch (final AmazonClientException e) { throw new IOException("Unable to upload object [" + blobName + "] using a single upload", e); } } @@ -218,23 +218,23 @@ void executeMultipartUpload(final S3BlobStore blobStore, final int nbParts = multiparts.v1().intValue(); final long lastPartSize = multiparts.v2(); - assert blobSize == (nbParts - 1) * partSize + lastPartSize : "blobSize does not match multipart sizes"; + assert blobSize == (((nbParts - 1) * partSize) + lastPartSize) : "blobSize does not match multipart sizes"; final SetOnce uploadId = new SetOnce<>(); final String bucketName = blobStore.bucket(); boolean success = false; - try { - final InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, blobName); - initRequest.setStorageClass(blobStore.getStorageClass()); - initRequest.setCannedACL(blobStore.getCannedACL()); - if (blobStore.serverSideEncryption()) { - final ObjectMetadata md = new ObjectMetadata(); - md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); - initRequest.setObjectMetadata(md); - } + final InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, blobName); + initRequest.setStorageClass(blobStore.getStorageClass()); + initRequest.setCannedACL(blobStore.getCannedACL()); + if 
(blobStore.serverSideEncryption()) { + final ObjectMetadata md = new ObjectMetadata(); + md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); + initRequest.setObjectMetadata(md); + } + try (AmazonS3Reference clientReference = blobStore.clientReference()) { - uploadId.set(blobStore.client().initiateMultipartUpload(initRequest).getUploadId()); + uploadId.set(SocketAccess.doPrivileged(() -> clientReference.client().initiateMultipartUpload(initRequest).getUploadId())); if (Strings.isEmpty(uploadId.get())) { throw new IOException("Failed to initialize multipart upload " + blobName); } @@ -259,7 +259,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, } bytesCount += uploadRequest.getPartSize(); - final UploadPartResult uploadResponse = blobStore.client().uploadPart(uploadRequest); + final UploadPartResult uploadResponse = SocketAccess.doPrivileged(() -> clientReference.client().uploadPart(uploadRequest)); parts.add(uploadResponse.getPartETag()); } @@ -268,16 +268,19 @@ void executeMultipartUpload(final S3BlobStore blobStore, + "bytes sent but got " + bytesCount); } - CompleteMultipartUploadRequest complRequest = new CompleteMultipartUploadRequest(bucketName, blobName, uploadId.get(), parts); - blobStore.client().completeMultipartUpload(complRequest); + final CompleteMultipartUploadRequest complRequest = new CompleteMultipartUploadRequest(bucketName, blobName, uploadId.get(), + parts); + SocketAccess.doPrivilegedVoid(() -> clientReference.client().completeMultipartUpload(complRequest)); success = true; - } catch (AmazonClientException e) { + } catch (final AmazonClientException e) { throw new IOException("Unable to upload object [" + blobName + "] using multipart upload", e); } finally { - if (success == false && Strings.hasLength(uploadId.get())) { + if ((success == false) && Strings.hasLength(uploadId.get())) { final AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest(bucketName, blobName, uploadId.get()); - blobStore.client().abortMultipartUpload(abortRequest); + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + SocketAccess.doPrivilegedVoid(() -> clientReference.client().abortMultipartUpload(abortRequest)); + } } } } @@ -296,7 +299,7 @@ static Tuple numberOfMultiparts(final long totalSize, final long par throw new IllegalArgumentException("Part size must be greater than zero"); } - if (totalSize == 0L || totalSize <= partSize) { + if ((totalSize == 0L) || (totalSize <= partSize)) { return Tuple.tuple(1L, totalSize); } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 27349f12135ed..c0f61e4d07828 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -19,13 +19,13 @@ package org.elasticsearch.repositories.s3; -import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.CannedAccessControlList; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.S3ObjectSummary; import com.amazonaws.services.s3.model.StorageClass; + import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import 
org.elasticsearch.common.blobstore.BlobStore; @@ -34,14 +34,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import java.security.AccessController; -import java.security.PrivilegedAction; +import java.io.IOException; import java.util.ArrayList; import java.util.Locale; class S3BlobStore extends AbstractComponent implements BlobStore { - private final AmazonS3 client; + private final AwsS3Service service; + + private final String clientName; private final String bucket; @@ -53,10 +54,11 @@ class S3BlobStore extends AbstractComponent implements BlobStore { private final StorageClass storageClass; - S3BlobStore(Settings settings, AmazonS3 client, String bucket, boolean serverSideEncryption, + S3BlobStore(Settings settings, AwsS3Service service, String clientName, String bucket, boolean serverSideEncryption, ByteSizeValue bufferSize, String cannedACL, String storageClass) { super(settings); - this.client = client; + this.service = service; + this.clientName = clientName; this.bucket = bucket; this.serverSideEncryption = serverSideEncryption; this.bufferSize = bufferSize; @@ -68,12 +70,14 @@ class S3BlobStore extends AbstractComponent implements BlobStore { // Also, if invalid security credentials are used to execute this method, the // client is not able to distinguish between bucket permission errors and // invalid credential errors, and this method could return an incorrect result. - SocketAccess.doPrivilegedVoid(() -> { - if (client.doesBucketExist(bucket) == false) { - throw new IllegalArgumentException("The bucket [" + bucket + "] does not exist. Please create it before " + - " creating an s3 snapshot repository backed by it."); - } - }); + try (AmazonS3Reference clientReference = clientReference()) { + SocketAccess.doPrivilegedVoid(() -> { + if (clientReference.client().doesBucketExist(bucket) == false) { + throw new IllegalArgumentException("The bucket [" + bucket + "] does not exist. 
Please create it before " + + " creating an s3 snapshot repository backed by it."); + } + }); + } } @Override @@ -81,8 +85,8 @@ public String toString() { return bucket; } - public AmazonS3 client() { - return client; + public AmazonS3Reference clientReference() { + return service.client(clientName); } public String bucket() { @@ -104,27 +108,30 @@ public BlobContainer blobContainer(BlobPath path) { @Override public void delete(BlobPath path) { - AccessController.doPrivileged((PrivilegedAction) () -> { + try (AmazonS3Reference clientReference = clientReference()) { ObjectListing prevListing = null; - //From http://docs.amazonwebservices.com/AmazonS3/latest/dev/DeletingMultipleObjectsUsingJava.html - //we can do at most 1K objects per delete - //We don't know the bucket name until first object listing + // From + // http://docs.amazonwebservices.com/AmazonS3/latest/dev/DeletingMultipleObjectsUsingJava.html + // we can do at most 1K objects per delete + // We don't know the bucket name until first object listing DeleteObjectsRequest multiObjectDeleteRequest = null; - ArrayList keys = new ArrayList<>(); + final ArrayList keys = new ArrayList<>(); while (true) { ObjectListing list; if (prevListing != null) { - list = client.listNextBatchOfObjects(prevListing); + final ObjectListing finalPrevListing = prevListing; + list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); } else { - list = client.listObjects(bucket, path.buildAsString()); + list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(bucket, path.buildAsString())); multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); } - for (S3ObjectSummary summary : list.getObjectSummaries()) { + for (final S3ObjectSummary summary : list.getObjectSummaries()) { keys.add(new KeyVersion(summary.getKey())); - //Every 500 objects batch the delete request + // Every 500 objects batch the delete request if (keys.size() > 500) { multiObjectDeleteRequest.setKeys(keys); - client.deleteObjects(multiObjectDeleteRequest); + final DeleteObjectsRequest finalMultiObjectDeleteRequest = multiObjectDeleteRequest; + SocketAccess.doPrivilegedVoid(() -> clientReference.client().deleteObjects(finalMultiObjectDeleteRequest)); multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); keys.clear(); } @@ -137,14 +144,15 @@ public void delete(BlobPath path) { } if (!keys.isEmpty()) { multiObjectDeleteRequest.setKeys(keys); - client.deleteObjects(multiObjectDeleteRequest); + final DeleteObjectsRequest finalMultiObjectDeleteRequest = multiObjectDeleteRequest; + SocketAccess.doPrivilegedVoid(() -> clientReference.client().deleteObjects(finalMultiObjectDeleteRequest)); } - return null; - }); + } } @Override - public void close() { + public void close() throws IOException { + this.service.close(); } public CannedAccessControlList getCannedACL() { @@ -154,18 +162,18 @@ public CannedAccessControlList getCannedACL() { public StorageClass getStorageClass() { return storageClass; } public static StorageClass initStorageClass(String storageClass) { - if (storageClass == null || storageClass.equals("")) { + if ((storageClass == null) || storageClass.equals("")) { return StorageClass.Standard; } try { - StorageClass _storageClass = StorageClass.fromValue(storageClass.toUpperCase(Locale.ENGLISH)); + final StorageClass _storageClass = StorageClass.fromValue(storageClass.toUpperCase(Locale.ENGLISH)); if (_storageClass.equals(StorageClass.Glacier)) { throw new 
BlobStoreException("Glacier storage class is not supported"); } return _storageClass; - } catch (IllegalArgumentException illegalArgumentException) { + } catch (final IllegalArgumentException illegalArgumentException) { throw new BlobStoreException("`" + storageClass + "` is not a valid S3 Storage Class."); } } @@ -174,11 +182,11 @@ public static StorageClass initStorageClass(String storageClass) { * Constructs canned acl from string */ public static CannedAccessControlList initCannedACL(String cannedACL) { - if (cannedACL == null || cannedACL.equals("")) { + if ((cannedACL == null) || cannedACL.equals("")) { return CannedAccessControlList.Private; } - for (CannedAccessControlList cur : CannedAccessControlList.values()) { + for (final CannedAccessControlList cur : CannedAccessControlList.values()) { if (cur.toString().equalsIgnoreCase(cannedACL)) { return cur; } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java index 4d32d2518fff1..ef6088fe154bf 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java @@ -24,10 +24,11 @@ import java.util.Locale; import java.util.Map; import java.util.Set; - import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import com.amazonaws.auth.BasicAWSCredentials; + +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; @@ -38,7 +39,7 @@ /** * A container for settings used to create an S3 client. */ -class S3ClientSettings { +final class S3ClientSettings { // prefix for s3 client settings private static final String PREFIX = "s3.client."; @@ -119,7 +120,7 @@ class S3ClientSettings { /** Whether the s3 client should use an exponential backoff retry policy. */ final boolean throttleRetries; - private S3ClientSettings(BasicAWSCredentials credentials, String endpoint, Protocol protocol, + protected S3ClientSettings(BasicAWSCredentials credentials, String endpoint, Protocol protocol, String proxyHost, int proxyPort, String proxyUsername, String proxyPassword, int readTimeoutMillis, int maxRetries, boolean throttleRetries) { this.credentials = credentials; @@ -140,9 +141,9 @@ private S3ClientSettings(BasicAWSCredentials credentials, String endpoint, Proto * Note this will always at least return a client named "default". */ static Map load(Settings settings) { - Set clientNames = settings.getGroups(PREFIX).keySet(); - Map clients = new HashMap<>(); - for (String clientName : clientNames) { + final Set clientNames = settings.getGroups(PREFIX).keySet(); + final Map clients = new HashMap<>(); + for (final String clientName : clientNames) { clients.put(clientName, getClientSettings(settings, clientName)); } if (clients.containsKey("default") == false) { @@ -153,23 +154,64 @@ static Map load(Settings settings) { return Collections.unmodifiableMap(clients); } - // pkg private for tests - /** Parse settings for a single client. 
*/ - static S3ClientSettings getClientSettings(Settings settings, String clientName) { + static Map overrideCredentials(Map clientsSettings, + BasicAWSCredentials credentials) { + final MapBuilder mapBuilder = new MapBuilder<>(); + for (final Map.Entry entry : clientsSettings.entrySet()) { + final S3ClientSettings s3ClientSettings = new S3ClientSettings(credentials, entry.getValue().endpoint, + entry.getValue().protocol, entry.getValue().proxyHost, entry.getValue().proxyPort, entry.getValue().proxyUsername, + entry.getValue().proxyPassword, entry.getValue().readTimeoutMillis, entry.getValue().maxRetries, + entry.getValue().throttleRetries); + mapBuilder.put(entry.getKey(), s3ClientSettings); + } + return mapBuilder.immutableMap(); + } + + static boolean checkDeprecatedCredentials(Settings repositorySettings) { + if (S3Repository.ACCESS_KEY_SETTING.exists(repositorySettings)) { + if (S3Repository.SECRET_KEY_SETTING.exists(repositorySettings) == false) { + throw new IllegalArgumentException("Repository setting [" + S3Repository.ACCESS_KEY_SETTING.getKey() + + " must be accompanied by setting [" + S3Repository.SECRET_KEY_SETTING.getKey() + "]"); + } + return true; + } else if (S3Repository.SECRET_KEY_SETTING.exists(repositorySettings)) { + throw new IllegalArgumentException("Repository setting [" + S3Repository.SECRET_KEY_SETTING.getKey() + + " must be accompanied by setting [" + S3Repository.ACCESS_KEY_SETTING.getKey() + "]"); + } + return false; + } + + // backcompat for reading keys out of repository settings (clusterState) + static BasicAWSCredentials loadDeprecatedCredentials(Settings repositorySettings) { + assert checkDeprecatedCredentials(repositorySettings); + try (SecureString key = S3Repository.ACCESS_KEY_SETTING.get(repositorySettings); + SecureString secret = S3Repository.SECRET_KEY_SETTING.get(repositorySettings)) { + return new BasicAWSCredentials(key.toString(), secret.toString()); + } + } + + static BasicAWSCredentials loadCredentials(Settings settings, String clientName) { try (SecureString accessKey = getConfigValue(settings, clientName, ACCESS_KEY_SETTING); - SecureString secretKey = getConfigValue(settings, clientName, SECRET_KEY_SETTING); - SecureString proxyUsername = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); - SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING)) { - BasicAWSCredentials credentials = null; + SecureString secretKey = getConfigValue(settings, clientName, SECRET_KEY_SETTING);) { if (accessKey.length() != 0) { if (secretKey.length() != 0) { - credentials = new BasicAWSCredentials(accessKey.toString(), secretKey.toString()); + return new BasicAWSCredentials(accessKey.toString(), secretKey.toString()); } else { throw new IllegalArgumentException("Missing secret key for s3 client [" + clientName + "]"); } } else if (secretKey.length() != 0) { throw new IllegalArgumentException("Missing access key for s3 client [" + clientName + "]"); } + return null; + } + } + + // pkg private for tests + /** Parse settings for a single client. 
*/ + static S3ClientSettings getClientSettings(Settings settings, String clientName) { + final BasicAWSCredentials credentials = S3ClientSettings.loadCredentials(settings, clientName); + try (SecureString proxyUsername = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); + SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING)) { return new S3ClientSettings( credentials, getConfigValue(settings, clientName, ENDPOINT_SETTING), @@ -187,7 +229,7 @@ static S3ClientSettings getClientSettings(Settings settings, String clientName) private static T getConfigValue(Settings settings, String clientName, Setting.AffixSetting clientSetting) { - Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); + final Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); return concreteSetting.get(settings); } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index c185027d67f26..063e266837bad 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -19,7 +19,8 @@ package org.elasticsearch.repositories.s3; -import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.auth.BasicAWSCredentials; + import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; @@ -35,6 +36,9 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import java.io.IOException; +import java.util.Map; +import java.util.function.Function; /** * Shared file system implementation of the BlobStoreRepository @@ -134,6 +138,8 @@ class S3Repository extends BlobStoreRepository { */ static final Setting CANNED_ACL_SETTING = Setting.simpleString("canned_acl"); + static final Setting CLIENT_NAME = new Setting<>("client", "default", Function.identity()); + /** * Specifies the path within bucket to repository data. Defaults to root directory. 
*/ @@ -143,23 +149,24 @@ class S3Repository extends BlobStoreRepository { private final BlobPath basePath; - private ByteSizeValue chunkSize; + private final ByteSizeValue chunkSize; - private boolean compress; + private final boolean compress; /** * Constructs an s3 backed repository */ - S3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry, AwsS3Service s3Service) { + S3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry, + AwsS3Service awsService) throws IOException { super(metadata, settings, namedXContentRegistry); - String bucket = BUCKET_SETTING.get(metadata.settings()); + final String bucket = BUCKET_SETTING.get(metadata.settings()); if (bucket == null) { throw new RepositoryException(metadata.name(), "No bucket defined for s3 repository"); } - boolean serverSideEncryption = SERVER_SIDE_ENCRYPTION_SETTING.get(metadata.settings()); - ByteSizeValue bufferSize = BUFFER_SIZE_SETTING.get(metadata.settings()); + final boolean serverSideEncryption = SERVER_SIDE_ENCRYPTION_SETTING.get(metadata.settings()); + final ByteSizeValue bufferSize = BUFFER_SIZE_SETTING.get(metadata.settings()); this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings()); this.compress = COMPRESS_SETTING.get(metadata.settings()); @@ -170,17 +177,22 @@ class S3Repository extends BlobStoreRepository { } // Parse and validate the user's S3 Storage Class setting - String storageClass = STORAGE_CLASS_SETTING.get(metadata.settings()); - String cannedACL = CANNED_ACL_SETTING.get(metadata.settings()); + final String storageClass = STORAGE_CLASS_SETTING.get(metadata.settings()); + final String cannedACL = CANNED_ACL_SETTING.get(metadata.settings()); + final String clientName = CLIENT_NAME.get(metadata.settings()); logger.debug("using bucket [{}], chunk_size [{}], server_side_encryption [{}], " + "buffer_size [{}], cannedACL [{}], storageClass [{}]", bucket, chunkSize, serverSideEncryption, bufferSize, cannedACL, storageClass); - AmazonS3 client = s3Service.client(metadata.settings()); - blobStore = new S3BlobStore(settings, client, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); + // deprecated behavior: override client credentials from the cluster state + // (repository settings) + if (S3ClientSettings.checkDeprecatedCredentials(metadata.settings())) { + overrideCredentialsFromClusterState(awsService); + } + blobStore = new S3BlobStore(settings, awsService, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); - String basePath = BASE_PATH_SETTING.get(metadata.settings()); + final String basePath = BASE_PATH_SETTING.get(metadata.settings()); if (Strings.hasLength(basePath)) { this.basePath = new BlobPath().add(basePath); } else { @@ -207,4 +219,14 @@ protected boolean isCompress() { protected ByteSizeValue chunkSize() { return chunkSize; } + + void overrideCredentialsFromClusterState(AwsS3Service awsService) { + deprecationLogger.deprecated("Using s3 access/secret key from repository settings. 
Instead " + + "store these in named clients and the elasticsearch keystore for secure settings."); + final BasicAWSCredentials insecureCredentials = S3ClientSettings.loadDeprecatedCredentials(metadata.settings()); + // hack, but that's ok because the whole if branch should be axed + final Map prevSettings = awsService.refreshAndClearCache(S3ClientSettings.load(Settings.EMPTY)); + final Map newSettings = S3ClientSettings.overrideCredentials(prevSettings, insecureCredentials); + awsService.refreshAndClearCache(newSettings); + } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index e31495efc0eef..93561c94d2b9a 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -19,6 +19,7 @@ package org.elasticsearch.repositories.s3; +import java.io.IOException; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Arrays; @@ -28,18 +29,20 @@ import com.amazonaws.util.json.Jackson; import org.elasticsearch.SpecialPermission; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; /** * A plugin to add a repository type that writes to and from the AWS S3. 
 */
-public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin {
+public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin, ReloadablePlugin {
     static {
         SpecialPermission.check();
@@ -50,30 +53,40 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin {
             // ClientConfiguration clinit has some classloader problems
             // TODO: fix that
             Class.forName("com.amazonaws.ClientConfiguration");
-        } catch (ClassNotFoundException e) {
+        } catch (final ClassNotFoundException e) {
             throw new RuntimeException(e);
         }
             return null;
         });
     }
-    private final Map<String, S3ClientSettings> clientsSettings;
+    private final AwsS3Service awsS3Service;
     public S3RepositoryPlugin(Settings settings) {
+        this.awsS3Service = getAwsS3Service(settings);
         // eagerly load client settings so that secure settings are read
-        clientsSettings = S3ClientSettings.load(settings);
-        assert clientsSettings.isEmpty() == false : "always at least have 'default'";
+        final Map<String, S3ClientSettings> clientsSettings = S3ClientSettings.load(settings);
+        this.awsS3Service.refreshAndClearCache(clientsSettings);
     }
-    // overridable for tests
-    protected AwsS3Service createStorageService(Settings settings) {
-        return new InternalAwsS3Service(settings, clientsSettings);
+    protected S3RepositoryPlugin(AwsS3Service awsS3Service) {
+        this.awsS3Service = awsS3Service;
+    }
+
+    // proxy method for testing
+    protected S3Repository getS3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry)
+            throws IOException {
+        return new S3Repository(metadata, settings, namedXContentRegistry, awsS3Service);
+    }
+
+    // proxy method for testing
+    protected AwsS3Service getAwsS3Service(Settings settings) {
+        return new InternalAwsS3Service(settings);
     }
     @Override
     public Map<String, Repository.Factory> getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) {
-        return Collections.singletonMap(S3Repository.TYPE,
-            (metadata) -> new S3Repository(metadata, env.settings(), namedXContentRegistry, createStorageService(env.settings())));
+        return Collections.singletonMap(S3Repository.TYPE, (metadata) -> getS3Repository(metadata, env.settings(), namedXContentRegistry));
     }
     @Override
@@ -94,4 +107,16 @@ public List<Setting<?>> getSettings() {
         S3Repository.ACCESS_KEY_SETTING,
         S3Repository.SECRET_KEY_SETTING);
     }
+
+    @Override
+    public void reload(Settings settings) {
+        // secure settings should be readable
+        final Map<String, S3ClientSettings> clientsSettings = S3ClientSettings.load(settings);
+        awsS3Service.refreshAndClearCache(clientsSettings);
+    }
+
+    @Override
+    public void close() throws IOException {
+        awsS3Service.close();
+    }
 }
diff --git a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy
index d8fca1fc89938..5fd69b4c2fc3f 100644
--- a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy
+++ b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy
@@ -37,4 +37,7 @@ grant {
   // s3 client opens socket connections for to access repository
   permission java.net.SocketPermission "*", "connect";
+
+  // only for tests : org.elasticsearch.repositories.s3.S3RepositoryPlugin
+  permission java.util.PropertyPermission "es.allow_insecure_settings", "read,write";
 };
diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java
index b40dc75c83701..dd829ee90c12f 100644
---
a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories.s3; -import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.S3ObjectSummary; @@ -180,13 +179,13 @@ public void testEncryption() { Settings settings = internalCluster().getInstance(Settings.class); Settings bucket = settings.getByPrefix("repositories.s3."); - AmazonS3 s3Client = internalCluster().getInstance(AwsS3Service.class).client(repositorySettings); - - String bucketName = bucket.get("bucket"); - logger.info("--> verify encryption for bucket [{}], prefix [{}]", bucketName, basePath); - List summaries = s3Client.listObjects(bucketName, basePath).getObjectSummaries(); - for (S3ObjectSummary summary : summaries) { - assertThat(s3Client.getObjectMetadata(bucketName, summary.getKey()).getSSEAlgorithm(), equalTo("AES256")); + try (AmazonS3Reference s3Client = internalCluster().getInstance(AwsS3Service.class).client("default")) { + String bucketName = bucket.get("bucket"); + logger.info("--> verify encryption for bucket [{}], prefix [{}]", bucketName, basePath); + List summaries = s3Client.client().listObjects(bucketName, basePath).getObjectSummaries(); + for (S3ObjectSummary summary : summaries) { + assertThat(s3Client.client().getObjectMetadata(bucketName, summary.getKey()).getSSEAlgorithm(), equalTo("AES256")); + } } logger.info("--> delete some data"); @@ -443,8 +442,7 @@ public void cleanRepositoryFiles(String basePath) { // We check that settings has been set in elasticsearch.yml integration test file // as described in README assertThat("Your settings in elasticsearch.yml are incorrect. 
Check README file.", bucketName, notNullValue()); - AmazonS3 client = internalCluster().getInstance(AwsS3Service.class).client(Settings.EMPTY); - try { + try (AmazonS3Reference s3Client = internalCluster().getInstance(AwsS3Service.class).client("default")) { ObjectListing prevListing = null; //From http://docs.amazonwebservices.com/AmazonS3/latest/dev/DeletingMultipleObjectsUsingJava.html //we can do at most 1K objects per delete @@ -454,9 +452,9 @@ public void cleanRepositoryFiles(String basePath) { while (true) { ObjectListing list; if (prevListing != null) { - list = client.listNextBatchOfObjects(prevListing); + list = s3Client.client().listNextBatchOfObjects(prevListing); } else { - list = client.listObjects(bucketName, basePath); + list = s3Client.client().listObjects(bucketName, basePath); multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); } for (S3ObjectSummary summary : list.getObjectSummaries()) { @@ -464,7 +462,7 @@ public void cleanRepositoryFiles(String basePath) { //Every 500 objects batch the delete request if (keys.size() > 500) { multiObjectDeleteRequest.setKeys(keys); - client.deleteObjects(multiObjectDeleteRequest); + s3Client.client().deleteObjects(multiObjectDeleteRequest); multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); keys.clear(); } @@ -477,7 +475,7 @@ public void cleanRepositoryFiles(String basePath) { } if (!keys.isEmpty()) { multiObjectDeleteRequest.setKeys(keys); - client.deleteObjects(multiObjectDeleteRequest); + s3Client.client().deleteObjects(multiObjectDeleteRequest); } } catch (Exception ex) { logger.warn((Supplier) () -> new ParameterizedMessage("Failed to delete S3 repository [{}]", bucketName), ex); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Wrapper.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Wrapper.java index bcab130e7d531..91b364011b80a 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Wrapper.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Wrapper.java @@ -727,4 +727,9 @@ public BucketReplicationConfiguration getBucketReplicationConfiguration(GetBucke public HeadBucketResult headBucket(HeadBucketRequest headBucketRequest) throws AmazonClientException, AmazonServiceException { return delegate.headBucket(headBucketRequest); } + + @Override + public void shutdown() { + delegate.shutdown(); + } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java index 353de31fa1873..6f55f3ed345df 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java @@ -21,75 +21,89 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; -import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.internal.StaticCredentialsProvider; + import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import java.util.Locale; +import java.util.Map; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class 
AwsS3ServiceImplTests extends ESTestCase { - public void testAWSCredentialsWithSystemProviders() { - S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(Settings.EMPTY, "default"); - AWSCredentialsProvider credentialsProvider = - InternalAwsS3Service.buildCredentials(logger, deprecationLogger, clientSettings, Settings.EMPTY); + public void testAWSCredentialsDefaultToInstanceProviders() { + final String inexistentClientName = randomAlphaOfLength(8).toLowerCase(Locale.ROOT); + final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(Settings.EMPTY, inexistentClientName); + final AWSCredentialsProvider credentialsProvider = InternalAwsS3Service.buildCredentials(logger, clientSettings); assertThat(credentialsProvider, instanceOf(InternalAwsS3Service.PrivilegedInstanceProfileCredentialsProvider.class)); } - public void testAwsCredsDefaultSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("s3.client.default.access_key", "aws_key"); - secureSettings.setString("s3.client.default.secret_key", "aws_secret"); - Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - assertCredentials(Settings.EMPTY, settings, "aws_key", "aws_secret"); - } - - public void testAwsCredsExplicitConfigSettings() { - Settings repositorySettings = Settings.builder().put(InternalAwsS3Service.CLIENT_NAME.getKey(), "myconfig").build(); - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("s3.client.myconfig.access_key", "aws_key"); - secureSettings.setString("s3.client.myconfig.secret_key", "aws_secret"); - secureSettings.setString("s3.client.default.access_key", "wrong_key"); - secureSettings.setString("s3.client.default.secret_key", "wrong_secret"); - Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - assertCredentials(repositorySettings, settings, "aws_key", "aws_secret"); - } - - public void testRepositorySettingsCredentialsDisallowed() { - Settings repositorySettings = Settings.builder() - .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "aws_key") - .put(S3Repository.SECRET_KEY_SETTING.getKey(), "aws_secret").build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - assertCredentials(repositorySettings, Settings.EMPTY, "aws_key", "aws_secret")); - assertThat(e.getMessage(), containsString("Setting [access_key] is insecure")); - } - - public void testRepositorySettingsCredentialsMissingKey() { - Settings repositorySettings = Settings.builder().put(S3Repository.SECRET_KEY_SETTING.getKey(), "aws_secret").build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - assertCredentials(repositorySettings, Settings.EMPTY, "aws_key", "aws_secret")); - assertThat(e.getMessage(), containsString("must be accompanied by setting [access_key]")); + public void testAWSCredentialsFromKeystore() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + final String clientNamePrefix = "some_client_name_"; + final int clientsCount = randomIntBetween(0, 4); + for (int i = 0; i < clientsCount; i++) { + final String clientName = clientNamePrefix + i; + secureSettings.setString("s3.client." + clientName + ".access_key", clientName + "_aws_access_key"); + secureSettings.setString("s3.client." 
+ clientName + ".secret_key", clientName + "_aws_secret_key"); + } + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + final Map allClientsSettings = S3ClientSettings.load(settings); + // no less, no more + assertThat(allClientsSettings.size(), is(clientsCount + 1)); // including default + for (int i = 0; i < clientsCount; i++) { + final String clientName = clientNamePrefix + i; + final S3ClientSettings someClientSettings = allClientsSettings.get(clientName); + final AWSCredentialsProvider credentialsProvider = InternalAwsS3Service.buildCredentials(logger, someClientSettings); + assertThat(credentialsProvider, instanceOf(StaticCredentialsProvider.class)); + assertThat(credentialsProvider.getCredentials().getAWSAccessKeyId(), is(clientName + "_aws_access_key")); + assertThat(credentialsProvider.getCredentials().getAWSSecretKey(), is(clientName + "_aws_secret_key")); + } + // test default exists and is an Instance provider + final S3ClientSettings defaultClientSettings = allClientsSettings.get("default"); + final AWSCredentialsProvider defaultCredentialsProvider = InternalAwsS3Service.buildCredentials(logger, defaultClientSettings); + assertThat(defaultCredentialsProvider, instanceOf(InternalAwsS3Service.PrivilegedInstanceProfileCredentialsProvider.class)); } - public void testRepositorySettingsCredentialsMissingSecret() { - Settings repositorySettings = Settings.builder().put(S3Repository.ACCESS_KEY_SETTING.getKey(), "aws_key").build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - assertCredentials(repositorySettings, Settings.EMPTY, "aws_key", "aws_secret")); - assertThat(e.getMessage(), containsString("must be accompanied by setting [secret_key]")); + public void testSetDefaultCredential() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + final String awsAccessKey = randomAlphaOfLength(8); + final String awsSecretKey = randomAlphaOfLength(8); + secureSettings.setString("s3.client.default.access_key", awsAccessKey); + secureSettings.setString("s3.client.default.secret_key", awsSecretKey); + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + final Map allClientsSettings = S3ClientSettings.load(settings); + assertThat(allClientsSettings.size(), is(1)); + // test default exists and is an Instance provider + final S3ClientSettings defaultClientSettings = allClientsSettings.get("default"); + final AWSCredentialsProvider defaultCredentialsProvider = InternalAwsS3Service.buildCredentials(logger, defaultClientSettings); + assertThat(defaultCredentialsProvider, instanceOf(StaticCredentialsProvider.class)); + assertThat(defaultCredentialsProvider.getCredentials().getAWSAccessKeyId(), is(awsAccessKey)); + assertThat(defaultCredentialsProvider.getCredentials().getAWSSecretKey(), is(awsSecretKey)); } - private void assertCredentials(Settings singleRepositorySettings, Settings settings, - String expectedKey, String expectedSecret) { - String configName = InternalAwsS3Service.CLIENT_NAME.get(singleRepositorySettings); - S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, configName); - AWSCredentials credentials = InternalAwsS3Service.buildCredentials(logger, deprecationLogger, - clientSettings, singleRepositorySettings).getCredentials(); - assertThat(credentials.getAWSAccessKeyId(), is(expectedKey)); - assertThat(credentials.getAWSSecretKey(), is(expectedSecret)); + public void testCredentialsIncomplete() { + final MockSecureSettings 
secureSettings = new MockSecureSettings(); + final String clientName = randomAlphaOfLength(8).toLowerCase(Locale.ROOT); + final boolean missingOrMissing = randomBoolean(); + if (missingOrMissing) { + secureSettings.setString("s3.client." + clientName + ".access_key", "aws_access_key"); + } else { + secureSettings.setString("s3.client." + clientName + ".secret_key", "aws_secret_key"); + } + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + final Exception e = expectThrows(IllegalArgumentException.class, () -> S3ClientSettings.load(settings)); + if (missingOrMissing) { + assertThat(e.getMessage(), containsString("Missing secret key for s3 client [" + clientName + "]")); + } else { + assertThat(e.getMessage(), containsString("Missing access key for s3 client [" + clientName + "]")); + } } public void testAWSDefaultConfiguration() { @@ -98,10 +112,10 @@ public void testAWSDefaultConfiguration() { } public void testAWSConfigurationWithAwsSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); + final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("s3.client.default.proxy.username", "aws_proxy_username"); secureSettings.setString("s3.client.default.proxy.password", "aws_proxy_password"); - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(secureSettings) .put("s3.client.default.protocol", "http") .put("s3.client.default.proxy.host", "aws_proxy_host") @@ -113,7 +127,7 @@ public void testAWSConfigurationWithAwsSettings() { } public void testRepositoryMaxRetries() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .put("s3.client.default.max_retries", 5) .build(); launchAWSConfigurationTest(settings, Protocol.HTTPS, null, -1, null, @@ -123,7 +137,7 @@ public void testRepositoryMaxRetries() { public void testRepositoryThrottleRetries() { final boolean throttling = randomBoolean(); - Settings settings = Settings.builder().put("s3.client.default.use_throttle_retries", throttling).build(); + final Settings settings = Settings.builder().put("s3.client.default.use_throttle_retries", throttling).build(); launchAWSConfigurationTest(settings, Protocol.HTTPS, null, -1, null, null, 3, throttling, 50000); } @@ -137,8 +151,8 @@ private void launchAWSConfigurationTest(Settings settings, boolean expectedUseThrottleRetries, int expectedReadTimeout) { - S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default"); - ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(clientSettings); + final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default"); + final ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(clientSettings); assertThat(configuration.getResponseMetadataCacheSize(), is(0)); assertThat(configuration.getProtocol(), is(expectedProtocol)); @@ -152,15 +166,15 @@ private void launchAWSConfigurationTest(Settings settings, } public void testEndpointSetting() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .put("s3.client.default.endpoint", "s3.endpoint") .build(); assertEndpoint(Settings.EMPTY, settings, "s3.endpoint"); } private void assertEndpoint(Settings repositorySettings, Settings settings, String expectedEndpoint) { - String configName = InternalAwsS3Service.CLIENT_NAME.get(repositorySettings); - S3ClientSettings clientSettings = 
S3ClientSettings.getClientSettings(settings, configName); + final String configName = S3Repository.CLIENT_NAME.get(repositorySettings); + final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, configName); assertThat(clientSettings.endpoint, is(expectedEndpoint)); } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java index 33d5d5fbc2038..d610e6d74a06d 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java @@ -157,6 +157,11 @@ public void deleteObject(final DeleteObjectRequest request) throws AmazonClientE throw exception; } } + + @Override + public void shutdown() { + // TODO check close + } @Override public DeleteObjectsResult deleteObjects(DeleteObjectsRequest request) throws SdkClientException { diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java new file mode 100644 index 0000000000000..f3bd894977999 --- /dev/null +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -0,0 +1,211 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.repositories.s3; + +import com.amazonaws.ClientConfiguration; +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.services.s3.AmazonS3; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.security.AccessController; +import java.security.PrivilegedAction; + +import static org.hamcrest.Matchers.is; + +@SuppressForbidden(reason = "test fixture requires System.setProperty") +public class RepositoryCredentialsTests extends ESTestCase { + + static { + AccessController.doPrivileged((PrivilegedAction) () -> { + // required for client settings overwriting + System.setProperty("es.allow_insecure_settings", "true"); + return null; + }); + } + + static final class ProxyS3RepositoryPlugin extends S3RepositoryPlugin { + + static final class ClientAndCredentials extends AmazonS3Wrapper { + final AWSCredentialsProvider credentials; + + ClientAndCredentials(AmazonS3 delegate, AWSCredentialsProvider credentials) { + super(delegate); + this.credentials = credentials; + } + + @Override + public boolean doesBucketExist(String bucketName) { + return true; + } + } + + static final class ProxyInternalAwsS3Service extends InternalAwsS3Service { + + ProxyInternalAwsS3Service(Settings settings) { + super(settings); + } + + @Override + AmazonS3 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) { + final AmazonS3 client = super.buildClient(credentials, configuration); + return new ClientAndCredentials(client, credentials); + } + + } + + protected ProxyS3RepositoryPlugin(Settings settings) { + super(settings); + } + + @Override + protected AwsS3Service getAwsS3Service(Settings settings) { + return new ProxyInternalAwsS3Service(settings); + } + + } + + public void testRepositoryCredentialsOverrideSecureCredentials() throws IOException { + final int clientsCount = randomIntBetween(0, 4); + final String[] clientNames = new String[clientsCount + 1]; + clientNames[0] = "default"; + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.access_key", "secure_aws_key"); + secureSettings.setString("s3.client.default.secret_key", "secure_aws_secret"); + for (int i = 0; i < clientsCount; i++) { + final String clientName = "client_" + i; + secureSettings.setString("s3.client." + clientName + ".access_key", "secure_aws_key_" + i); + secureSettings.setString("s3.client." 
+ clientName + ".secret_key", "secure_aws_secret_" + i); + clientNames[i + 1] = clientName; + } + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + // repository settings for credentials override node secure settings + final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() + .put(S3Repository.CLIENT_NAME.getKey(), randomFrom(clientNames)) + .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "insecure_aws_key") + .put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret").build()); + try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings); + S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); + AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); + } + assertWarnings( + "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release!" + + " See the breaking changes documentation for the next major version.", + "Using s3 access/secret key from repository settings. Instead store these in named clients and" + + " the elasticsearch keystore for secure settings.", + "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release!" + + " See the breaking changes documentation for the next major version."); + } + + public void testRepositoryCredentialsOnly() throws IOException { + // repository settings for credentials override node secure settings + final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", + Settings.builder() + .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "insecure_aws_key") + .put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret") + .build()); + try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(Settings.EMPTY); + S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); + AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); + } + assertWarnings( + "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release!" + + " See the breaking changes documentation for the next major version.", + "Using s3 access/secret key from repository settings. Instead store these in named clients and" + + " the elasticsearch keystore for secure settings.", + "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release!" + + " See the breaking changes documentation for the next major version."); + } + + public void testReinitSecureCredentials() throws IOException { + final String clientName = randomFrom("default", "some_client"); + // initial client node settings + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client." + clientName + ".access_key", "secure_aws_key"); + secureSettings.setString("s3.client." 
+ clientName + ".secret_key", "secure_aws_secret"); + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + // repository settings + final Settings.Builder builder = Settings.builder().put(S3Repository.CLIENT_NAME.getKey(), clientName); + final boolean repositorySettings = randomBoolean(); + if (repositorySettings) { + builder.put(S3Repository.ACCESS_KEY_SETTING.getKey(), "insecure_aws_key"); + builder.put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret"); + } + final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", builder.build()); + try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings); + S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY)) { + try (AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials + .getCredentials(); + if (repositorySettings) { + assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); + } else { + assertThat(credentials.getAWSAccessKeyId(), is("secure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("secure_aws_secret")); + } + // new settings + final MockSecureSettings newSecureSettings = new MockSecureSettings(); + newSecureSettings.setString("s3.client." + clientName + ".access_key", "new_secret_aws_key"); + newSecureSettings.setString("s3.client." + clientName + ".secret_key", "new_secret_aws_secret"); + final Settings newSettings = Settings.builder().setSecureSettings(newSecureSettings).build(); + // reload S3 plugin settings + s3Plugin.reload(newSettings); + // check the not-yet-closed client reference still has the same credentials + if (repositorySettings) { + assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); + } else { + assertThat(credentials.getAWSAccessKeyId(), is("secure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("secure_aws_secret")); + } + } + // check credentials have been updated + try (AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + final AWSCredentials newCredentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials + .getCredentials(); + assertThat(newCredentials.getAWSAccessKeyId(), is("new_secret_aws_key")); + assertThat(newCredentials.getAWSSecretKey(), is("new_secret_aws_secret")); + } + } + if (repositorySettings) { + assertWarnings( + "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release!" + + " See the breaking changes documentation for the next major version.", + "Using s3 access/secret key from repository settings. Instead store these in named clients and" + + " the elasticsearch keystore for secure settings.", + "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release!" 
+ + " See the breaking changes documentation for the next major version."); + } + } + +} diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositorySettingsCredentialsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositorySettingsCredentialsTests.java deleted file mode 100644 index c3e7069fdfd65..0000000000000 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositorySettingsCredentialsTests.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.repositories.s3; - -import com.amazonaws.auth.AWSCredentials; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; - -public class RepositorySettingsCredentialsTests extends ESTestCase { - - public void testRepositorySettingsCredentials() { - Settings repositorySettings = Settings.builder() - .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "aws_key") - .put(S3Repository.SECRET_KEY_SETTING.getKey(), "aws_secret").build(); - AWSCredentials credentials = InternalAwsS3Service.buildCredentials(logger, deprecationLogger, - S3ClientSettings.getClientSettings(Settings.EMPTY, "default"), repositorySettings).getCredentials(); - assertEquals("aws_key", credentials.getAWSAccessKeyId()); - assertEquals("aws_secret", credentials.getAWSSecretKey()); - assertSettingDeprecationsAndWarnings(new Setting[] { S3Repository.ACCESS_KEY_SETTING, S3Repository.SECRET_KEY_SETTING }, - "Using s3 access/secret key from repository settings. 
" + - "Instead store these in named clients and the elasticsearch keystore for secure settings."); - } -} diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java index c760e86d1353f..b2afd826c5b8e 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java @@ -57,6 +57,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import static org.mockito.Mockito.doAnswer; public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { @@ -74,7 +75,7 @@ public void testExecuteSingleUploadBlobSizeTooLarge() { final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> blobContainer.executeSingleUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize)); assertEquals("Upload request size [" + blobSize + "] can't be larger than 5gb", e.getMessage()); } @@ -86,7 +87,7 @@ public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() { final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); final String blobName = randomAlphaOfLengthBetween(1, 10); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> blobContainer.executeSingleUpload(blobStore, blobName, new ByteArrayInputStream(new byte[0]), ByteSizeUnit.MB.toBytes(2))); assertEquals("Upload request size [2097152] can't be larger than buffer size", e.getMessage()); } @@ -121,7 +122,8 @@ public void testExecuteSingleUpload() throws IOException { } final AmazonS3 client = mock(AmazonS3.class); - when(blobStore.client()).thenReturn(client); + final AmazonS3Reference clientReference = new AmazonS3Reference(client); + when(blobStore.clientReference()).thenReturn(clientReference); final ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(PutObjectRequest.class); when(client.putObject(argumentCaptor.capture())).thenReturn(new PutObjectResult()); @@ -146,7 +148,7 @@ public void testExecuteMultipartUploadBlobSizeTooLarge() { final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> blobContainer.executeMultipartUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) ); assertEquals("Multipart upload request size [" + blobSize + "] can't be larger than 5tb", e.getMessage()); @@ -157,7 +159,7 @@ public void testExecuteMultipartUploadBlobSizeTooSmall() { final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> 
blobContainer.executeMultipartUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) ); assertEquals("Multipart upload request size [" + blobSize + "] can't be smaller than 5mb", e.getMessage()); @@ -191,7 +193,8 @@ public void testExecuteMultipartUpload() throws IOException { } final AmazonS3 client = mock(AmazonS3.class); - when(blobStore.client()).thenReturn(client); + final AmazonS3Reference clientReference = new AmazonS3Reference(client); + when(blobStore.clientReference()).thenReturn(clientReference); final ArgumentCaptor initArgCaptor = ArgumentCaptor.forClass(InitiateMultipartUploadRequest.class); final InitiateMultipartUploadResult initResult = new InitiateMultipartUploadResult(); @@ -201,7 +204,7 @@ public void testExecuteMultipartUpload() throws IOException { final ArgumentCaptor uploadArgCaptor = ArgumentCaptor.forClass(UploadPartRequest.class); final List expectedEtags = new ArrayList<>(); - long partSize = Math.min(bufferSize, blobSize); + final long partSize = Math.min(bufferSize, blobSize); long totalBytes = 0; do { expectedEtags.add(randomAlphaOfLength(50)); @@ -238,7 +241,7 @@ public void testExecuteMultipartUpload() throws IOException { assertEquals(numberOfParts.v1().intValue(), uploadRequests.size()); for (int i = 0; i < uploadRequests.size(); i++) { - UploadPartRequest uploadRequest = uploadRequests.get(i); + final UploadPartRequest uploadRequest = uploadRequests.get(i); assertEquals(bucketName, uploadRequest.getBucketName()); assertEquals(blobPath.buildAsString() + blobName, uploadRequest.getKey()); @@ -260,7 +263,7 @@ public void testExecuteMultipartUpload() throws IOException { assertEquals(blobPath.buildAsString() + blobName, compRequest.getKey()); assertEquals(initResult.getUploadId(), compRequest.getUploadId()); - List actualETags = compRequest.getPartETags().stream().map(PartETag::getETag).collect(Collectors.toList()); + final List actualETags = compRequest.getPartETags().stream().map(PartETag::getETag).collect(Collectors.toList()); assertEquals(expectedEtags, actualETags); } @@ -278,7 +281,11 @@ public void testExecuteMultipartUploadAborted() { when(blobStore.getStorageClass()).thenReturn(randomFrom(StorageClass.values())); final AmazonS3 client = mock(AmazonS3.class); - when(blobStore.client()).thenReturn(client); + final AmazonS3Reference clientReference = new AmazonS3Reference(client); + doAnswer(invocation -> { + clientReference.incRef(); + return clientReference; + }).when(blobStore).clientReference(); final String uploadId = randomAlphaOfLength(25); @@ -360,7 +367,7 @@ public void testExecuteMultipartUploadAborted() { } public void testNumberOfMultipartsWithZeroPartSize() { - IllegalArgumentException e = + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> S3BlobContainer.numberOfMultiparts(randomNonNegativeLong(), 0L)); assertEquals("Part size must be greater than zero", e.getMessage()); } @@ -382,7 +389,7 @@ public void testNumberOfMultiparts() { // Fits in N parts plus a bit more final long remaining = randomIntBetween(1, (size > Integer.MAX_VALUE) ? 
Integer.MAX_VALUE : (int) size - 1); - assertNumberOfMultiparts(factor + 1, remaining, size * factor + remaining, size); + assertNumberOfMultiparts(factor + 1, remaining, (size * factor) + remaining, size); } private static void assertNumberOfMultiparts(final int expectedParts, final long expectedRemaining, long totalSize, long partSize) { diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index e599f84b411e4..2843390f1aa80 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.repositories.s3; -import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.CannedAccessControlList; import com.amazonaws.services.s3.model.StorageClass; @@ -50,7 +49,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; -import static java.util.Collections.emptyMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.not; @@ -87,11 +85,9 @@ public static void wipeRepository() { @Override protected void createTestRepository(final String name) { - assertAcked(client().admin().cluster().preparePutRepository(name) - .setType(S3Repository.TYPE) - .setSettings(Settings.builder() + assertAcked(client().admin().cluster().preparePutRepository(name).setType(S3Repository.TYPE).setSettings(Settings.builder() .put(S3Repository.BUCKET_SETTING.getKey(), bucket) - .put(InternalAwsS3Service.CLIENT_NAME.getKey(), client) + .put(S3Repository.CLIENT_NAME.getKey(), client) .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), bufferSize) .put(S3Repository.SERVER_SIDE_ENCRYPTION_SETTING.getKey(), serverSideEncryption) .put(S3Repository.CANNED_ACL_SETTING.getKey(), cannedACL) @@ -113,13 +109,17 @@ public TestS3RepositoryPlugin(final Settings settings) { @Override public Map getRepositories(final Environment env, final NamedXContentRegistry registry) { - return Collections.singletonMap(S3Repository.TYPE, (metadata) -> - new S3Repository(metadata, env.settings(), registry, new InternalAwsS3Service(env.settings(), emptyMap()) { - @Override - public synchronized AmazonS3 client(final Settings repositorySettings) { - return new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass); - } - })); + return Collections.singletonMap(S3Repository.TYPE, + (metadata) -> new S3Repository(metadata, env.settings(), registry, new InternalAwsS3Service(env.settings()) { + @Override + public synchronized AmazonS3Reference client(String clientName) { + return new AmazonS3Reference(new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass)); + } + }) { + @Override + void overrideCredentialsFromClusterState(AwsS3Service awsService) { + } + }); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java index 4a23e4efa9a29..a44946b6b3ffa 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java +++ 
b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java @@ -115,7 +115,15 @@ public static S3BlobStore randomMockS3BlobStore() { storageClass = randomValueOtherThan(StorageClass.Glacier, () -> randomFrom(StorageClass.values())).toString(); } - AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass); - return new S3BlobStore(Settings.EMPTY, client, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); + final String theClientName = randomAlphaOfLength(4); + final AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass); + final AwsS3Service service = new InternalAwsS3Service(Settings.EMPTY) { + @Override + public synchronized AmazonS3Reference client(String clientName) { + assert theClientName.equals(clientName); + return new AmazonS3Reference(client); + } + }; + return new S3BlobStore(Settings.EMPTY, service, theClientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index 7da65c27d8194..5c0aada66585c 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.services.s3.AbstractAmazonS3; -import com.amazonaws.services.s3.AmazonS3; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -31,18 +30,25 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; - import java.io.IOException; +import java.util.Collections; +import java.util.Map; import static org.hamcrest.Matchers.containsString; public class S3RepositoryTests extends ESTestCase { private static class DummyS3Client extends AbstractAmazonS3 { + @Override public boolean doesBucketExist(String bucketName) { return true; } + + @Override + public void shutdown() { + // TODO check is closed + } } private static class DummyS3Service extends AbstractLifecycleComponent implements AwsS3Service { @@ -56,53 +62,70 @@ protected void doStop() {} @Override protected void doClose() {} @Override - public AmazonS3 client(Settings settings) { - return new DummyS3Client(); + public AmazonS3Reference client(String clientName) { + return new AmazonS3Reference(new DummyS3Client()); + } + + @Override + public Map refreshAndClearCache(Map clientsSettings) { + return Collections.emptyMap(); + } + + @Override + public void close() { } } public void testInvalidChunkBufferSizeSettings() throws IOException { // chunk < buffer should fail - assertInvalidBuffer(10, 5, RepositoryException.class, "chunk_size (5mb) can't be lower than buffer_size (10mb)."); + final Settings s1 = bufferAndChunkSettings(10, 5); + final Exception e1 = expectThrows(RepositoryException.class, + () -> new S3Repository(getRepositoryMetaData(s1), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())); + assertThat(e1.getMessage(), containsString("chunk_size (5mb) can't be lower than buffer_size (10mb)")); // chunk > buffer should pass - assertValidBuffer(5, 10); + final Settings s2 = 
bufferAndChunkSettings(5, 10); + new S3Repository(getRepositoryMetaData(s2), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()).close(); // chunk = buffer should pass - assertValidBuffer(5, 5); + final Settings s3 = bufferAndChunkSettings(5, 5); + new S3Repository(getRepositoryMetaData(s3), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()).close(); // buffer < 5mb should fail - assertInvalidBuffer(4, 10, IllegalArgumentException.class, - "failed to parse value [4mb] for setting [buffer_size], must be >= [5mb]"); - // chunk > 5tb should fail - assertInvalidBuffer(5, 6000000, IllegalArgumentException.class, - "failed to parse value [6000000mb] for setting [chunk_size], must be <= [5tb]"); + final Settings s4 = bufferAndChunkSettings(4, 10); + final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, + () -> new S3Repository(getRepositoryMetaData(s4), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()) + .close()); + assertThat(e2.getMessage(), containsString("failed to parse value [4mb] for setting [buffer_size], must be >= [5mb]")); + final Settings s5 = bufferAndChunkSettings(5, 6000000); + final IllegalArgumentException e3 = expectThrows(IllegalArgumentException.class, + () -> new S3Repository(getRepositoryMetaData(s5), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()) + .close()); + assertThat(e3.getMessage(), containsString("failed to parse value [6000000mb] for setting [chunk_size], must be <= [5tb]")); } - private void assertValidBuffer(long bufferMB, long chunkMB) throws IOException { - RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() - .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), new ByteSizeValue(bufferMB, ByteSizeUnit.MB).getStringRep()) - .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(chunkMB, ByteSizeUnit.MB).getStringRep()).build()); - new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()); + private Settings bufferAndChunkSettings(long buffer, long chunk) { + return Settings.builder() + .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), new ByteSizeValue(buffer, ByteSizeUnit.MB).getStringRep()) + .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(chunk, ByteSizeUnit.MB).getStringRep()) + .build(); } - private void assertInvalidBuffer(int bufferMB, int chunkMB, Class clazz, String msg) throws IOException { - RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() - .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), new ByteSizeValue(bufferMB, ByteSizeUnit.MB).getStringRep()) - .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(chunkMB, ByteSizeUnit.MB).getStringRep()).build()); - - Exception e = expectThrows(clazz, () -> new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, - new DummyS3Service())); - assertThat(e.getMessage(), containsString(msg)); + private RepositoryMetaData getRepositoryMetaData(Settings settings) { + return new RepositoryMetaData("dummy-repo", "mock", Settings.builder().put(settings).build()); } public void testBasePathSetting() throws IOException { - RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() - .put(S3Repository.BASE_PATH_SETTING.getKey(), "foo/bar").build()); - S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()); - assertEquals("foo/bar/", s3repo.basePath().buildAsString()); 
+ final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() + .put(S3Repository.BASE_PATH_SETTING.getKey(), "foo/bar").build()); + try (S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())) { + assertEquals("foo/bar/", s3repo.basePath().buildAsString()); + } } - public void testDefaultBufferSize() { - ByteSizeValue defaultBufferSize = S3Repository.BUFFER_SIZE_SETTING.get(Settings.EMPTY); - assertThat(defaultBufferSize, Matchers.lessThanOrEqualTo(new ByteSizeValue(100, ByteSizeUnit.MB))); - assertThat(defaultBufferSize, Matchers.greaterThanOrEqualTo(new ByteSizeValue(5, ByteSizeUnit.MB))); + public void testDefaultBufferSize() throws IOException { + final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.EMPTY); + try (S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())) { + final long defaultBufferSize = ((S3BlobStore) s3repo.blobStore()).bufferSizeInBytes(); + assertThat(defaultBufferSize, Matchers.lessThanOrEqualTo(100L * 1024 * 1024)); + assertThat(defaultBufferSize, Matchers.greaterThanOrEqualTo(5L * 1024 * 1024)); + } } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAmazonS3.java index 93bf58cc28964..0c762659a5fe0 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAmazonS3.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAmazonS3.java @@ -51,7 +51,7 @@ public class TestAmazonS3 extends AmazonS3Wrapper { private double writeFailureRate = 0.0; private double readFailureRate = 0.0; - private String randomPrefix; + private final String randomPrefix; ConcurrentMap accessCounts = new ConcurrentHashMap<>(); @@ -76,18 +76,18 @@ public TestAmazonS3(AmazonS3 delegate, Settings settings) { @Override public PutObjectResult putObject(String bucketName, String key, InputStream input, ObjectMetadata metadata) throws AmazonClientException, AmazonServiceException { if (shouldFail(bucketName, key, writeFailureRate)) { - long length = metadata.getContentLength(); - long partToRead = (long) (length * randomDouble()); - byte[] buffer = new byte[1024]; + final long length = metadata.getContentLength(); + final long partToRead = (long) (length * randomDouble()); + final byte[] buffer = new byte[1024]; for (long cur = 0; cur < partToRead; cur += buffer.length) { try { - input.read(buffer, 0, (int) (partToRead - cur > buffer.length ? buffer.length : partToRead - cur)); - } catch (IOException ex) { + input.read(buffer, 0, (int) ((partToRead - cur) > buffer.length ? 
buffer.length : partToRead - cur)); + } catch (final IOException ex) { throw new ElasticsearchException("cannot read input stream", ex); } } logger.info("--> random write failure on putObject method: throwing an exception for [bucket={}, key={}]", bucketName, key); - AmazonS3Exception ex = new AmazonS3Exception("Random S3 exception"); + final AmazonS3Exception ex = new AmazonS3Exception("Random S3 exception"); ex.setStatusCode(400); ex.setErrorCode("RequestTimeout"); throw ex; @@ -99,18 +99,18 @@ public PutObjectResult putObject(String bucketName, String key, InputStream inpu @Override public UploadPartResult uploadPart(UploadPartRequest request) throws AmazonClientException, AmazonServiceException { if (shouldFail(request.getBucketName(), request.getKey(), writeFailureRate)) { - long length = request.getPartSize(); - long partToRead = (long) (length * randomDouble()); - byte[] buffer = new byte[1024]; + final long length = request.getPartSize(); + final long partToRead = (long) (length * randomDouble()); + final byte[] buffer = new byte[1024]; for (long cur = 0; cur < partToRead; cur += buffer.length) { try (InputStream input = request.getInputStream()){ - input.read(buffer, 0, (int) (partToRead - cur > buffer.length ? buffer.length : partToRead - cur)); - } catch (IOException ex) { + input.read(buffer, 0, (int) ((partToRead - cur) > buffer.length ? buffer.length : partToRead - cur)); + } catch (final IOException ex) { throw new ElasticsearchException("cannot read input stream", ex); } } logger.info("--> random write failure on uploadPart method: throwing an exception for [bucket={}, key={}]", request.getBucketName(), request.getKey()); - AmazonS3Exception ex = new AmazonS3Exception("Random S3 write exception"); + final AmazonS3Exception ex = new AmazonS3Exception("Random S3 write exception"); ex.setStatusCode(400); ex.setErrorCode("RequestTimeout"); throw ex; @@ -123,7 +123,7 @@ public UploadPartResult uploadPart(UploadPartRequest request) throws AmazonClien public S3Object getObject(String bucketName, String key) throws AmazonClientException, AmazonServiceException { if (shouldFail(bucketName, key, readFailureRate)) { logger.info("--> random read failure on getObject method: throwing an exception for [bucket={}, key={}]", bucketName, key); - AmazonS3Exception ex = new AmazonS3Exception("Random S3 read exception"); + final AmazonS3Exception ex = new AmazonS3Exception("Random S3 read exception"); ex.setStatusCode(404); throw ex; } else { @@ -135,7 +135,7 @@ private boolean shouldFail(String bucketName, String key, double probability) { if (probability > 0.0) { String path = randomPrefix + "-" + bucketName + "+" + key; path += "/" + incrementAndGet(path); - return Math.abs(hashCode(path)) < Integer.MAX_VALUE * probability; + return Math.abs(hashCode(path)) < (Integer.MAX_VALUE * probability); } else { return false; } @@ -143,14 +143,14 @@ private boolean shouldFail(String bucketName, String key, double probability) { private int hashCode(String path) { try { - MessageDigest digest = MessageDigest.getInstance("MD5"); - byte[] bytes = digest.digest(path.getBytes("UTF-8")); + final MessageDigest digest = MessageDigest.getInstance("MD5"); + final byte[] bytes = digest.digest(path.getBytes("UTF-8")); int i = 0; return ((bytes[i++] & 0xFF) << 24) | ((bytes[i++] & 0xFF) << 16) | ((bytes[i++] & 0xFF) << 8) | (bytes[i++] & 0xFF); - } catch (UnsupportedEncodingException ex) { + } catch (final UnsupportedEncodingException ex) { throw new ElasticsearchException("cannot calculate hashcode", ex); - } 
catch (NoSuchAlgorithmException ex) { + } catch (final NoSuchAlgorithmException ex) { throw new ElasticsearchException("cannot calculate hashcode", ex); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java index c5012d9c68bc7..f376f73820624 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java @@ -22,45 +22,39 @@ import java.util.IdentityHashMap; import com.amazonaws.services.s3.AmazonS3; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.Settings; public class TestAwsS3Service extends InternalAwsS3Service { public static class TestPlugin extends S3RepositoryPlugin { public TestPlugin(Settings settings) { - super(settings); - } - @Override - protected AwsS3Service createStorageService(Settings settings) { - return new TestAwsS3Service(settings); + super(new TestAwsS3Service(settings)); } } - IdentityHashMap clients = new IdentityHashMap<>(); + IdentityHashMap clients = new IdentityHashMap<>(); public TestAwsS3Service(Settings settings) { - super(settings, S3ClientSettings.load(settings)); + super(settings); } @Override - public synchronized AmazonS3 client(Settings repositorySettings) { - return cachedWrapper(super.client(repositorySettings)); + public synchronized AmazonS3Reference client(String clientName) { + return new AmazonS3Reference(cachedWrapper(super.client(clientName))); } - private AmazonS3 cachedWrapper(AmazonS3 client) { - TestAmazonS3 wrapper = clients.get(client); + private AmazonS3 cachedWrapper(AmazonS3Reference clientReference) { + TestAmazonS3 wrapper = clients.get(clientReference); if (wrapper == null) { - wrapper = new TestAmazonS3(client, settings); - clients.put(client, wrapper); + wrapper = new TestAmazonS3(clientReference.client(), settings); + clients.put(clientReference, wrapper); } return wrapper; } @Override - protected synchronized void doClose() throws ElasticsearchException { - super.doClose(); + protected synchronized void releaseCachedClients() { + super.releaseCachedClients(); clients.clear(); } - } diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 17acf7c10f534..7ddb39b6d6225 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -29,6 +29,8 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsAction; +import org.elasticsearch.action.admin.cluster.node.reload.TransportNodesReloadSecureSettingsAction; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; @@ -241,6 +243,7 @@ import org.elasticsearch.rest.action.admin.cluster.RestPendingClusterTasksAction; import org.elasticsearch.rest.action.admin.cluster.RestPutRepositoryAction; import 
org.elasticsearch.rest.action.admin.cluster.RestPutStoredScriptAction; +import org.elasticsearch.rest.action.admin.cluster.RestReloadSecureSettingsAction; import org.elasticsearch.rest.action.admin.cluster.RestRemoteClusterInfoAction; import org.elasticsearch.rest.action.admin.cluster.RestRestoreSnapshotAction; import org.elasticsearch.rest.action.admin.cluster.RestSnapshotsStatusAction; @@ -491,6 +494,7 @@ public void reg actions.register(ExplainAction.INSTANCE, TransportExplainAction.class); actions.register(ClearScrollAction.INSTANCE, TransportClearScrollAction.class); actions.register(RecoveryAction.INSTANCE, TransportRecoveryAction.class); + actions.register(NodesReloadSecureSettingsAction.INSTANCE, TransportNodesReloadSecureSettingsAction.class); //Indexed scripts actions.register(PutStoredScriptAction.INSTANCE, TransportPutStoredScriptAction.class); @@ -610,6 +614,8 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestRecoveryAction(settings, restController)); + registerHandler.accept(new RestReloadSecureSettingsAction(settings, restController)); + // Scripts API registerHandler.accept(new RestGetStoredScriptAction(settings, restController)); registerHandler.accept(new RestPutStoredScriptAction(settings, restController)); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceRemoteException.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java similarity index 56% rename from plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceRemoteException.java rename to server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java index 3f20e29505751..ccaeca8702f0b 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceRemoteException.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java @@ -17,14 +17,23 @@ * under the License. */ -package org.elasticsearch.repositories.azure; +package org.elasticsearch.action.admin.cluster.node.reload; -public class AzureServiceRemoteException extends IllegalStateException { - public AzureServiceRemoteException(String msg) { - super(msg); +import org.elasticsearch.action.Action; + +public class NodesReloadSecureSettingsAction + extends Action { + + public static final NodesReloadSecureSettingsAction INSTANCE = new NodesReloadSecureSettingsAction(); + public static final String NAME = "cluster:admin/nodes/reload_secure_settings"; + + private NodesReloadSecureSettingsAction() { + super(NAME); } - public AzureServiceRemoteException(String msg, Throwable cause) { - super(msg, cause); + @Override + public NodesReloadSecureSettingsResponse newResponse() { + return new NodesReloadSecureSettingsResponse(); } + } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java new file mode 100644 index 0000000000000..50df7b1bb26e0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java @@ -0,0 +1,160 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.node.reload; + + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.SecureString; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.CharBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * Request for a reload secure settings action + */ +public class NodesReloadSecureSettingsRequest extends BaseNodesRequest { + + /** + * The password which is broadcasted to all nodes, but is never stored on + * persistent storage. The password is used to reread and decrypt the contents + * of the node's keystore (backing the implementation of + * {@code SecureSettings}). + */ + private SecureString secureSettingsPassword; + + public NodesReloadSecureSettingsRequest() { + } + + /** + * Reload secure settings only on certain nodes, based on the nodes ids + * specified. If none are passed, secure settings will be reloaded on all the + * nodes. + */ + public NodesReloadSecureSettingsRequest(String... nodesIds) { + super(nodesIds); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (secureSettingsPassword == null) { + validationException = addValidationError("secure settings password cannot be null (use empty string instead)", + validationException); + } + return validationException; + } + + public SecureString secureSettingsPassword() { + return secureSettingsPassword; + } + + public NodesReloadSecureSettingsRequest secureStorePassword(SecureString secureStorePassword) { + this.secureSettingsPassword = secureStorePassword; + return this; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + final byte[] passwordBytes = in.readByteArray(); + try { + this.secureSettingsPassword = new SecureString(utf8BytesToChars(passwordBytes)); + } finally { + Arrays.fill(passwordBytes, (byte) 0); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + final byte[] passwordBytes = charsToUtf8Bytes(this.secureSettingsPassword.getChars()); + try { + out.writeByteArray(passwordBytes); + } finally { + Arrays.fill(passwordBytes, (byte) 0); + } + } + + /** + * Encodes the provided char[] to a UTF-8 byte[]. This is done while avoiding + * conversions to String. The provided char[] is not modified by this method, so + * the caller needs to take care of clearing the value if it is sensitive. 
+ */ + private static byte[] charsToUtf8Bytes(char[] chars) { + final CharBuffer charBuffer = CharBuffer.wrap(chars); + final ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(charBuffer); + final byte[] bytes; + if (byteBuffer.hasArray()) { + // there is no guarantee that the byte buffers backing array is the right size + // so we need to make a copy + bytes = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.position(), byteBuffer.limit()); + Arrays.fill(byteBuffer.array(), (byte) 0); // clear sensitive data + } else { + final int length = byteBuffer.limit() - byteBuffer.position(); + bytes = new byte[length]; + byteBuffer.get(bytes); + // if the buffer is not read only we can reset and fill with 0's + if (byteBuffer.isReadOnly() == false) { + byteBuffer.clear(); // reset + for (int i = 0; i < byteBuffer.limit(); i++) { + byteBuffer.put((byte) 0); + } + } + } + return bytes; + } + + /** + * Decodes the provided byte[] to a UTF-8 char[]. This is done while avoiding + * conversions to String. The provided byte[] is not modified by this method, so + * the caller needs to take care of clearing the value if it is sensitive. + */ + public static char[] utf8BytesToChars(byte[] utf8Bytes) { + final ByteBuffer byteBuffer = ByteBuffer.wrap(utf8Bytes); + final CharBuffer charBuffer = StandardCharsets.UTF_8.decode(byteBuffer); + final char[] chars; + if (charBuffer.hasArray()) { + // there is no guarantee that the char buffers backing array is the right size + // so we need to make a copy + chars = Arrays.copyOfRange(charBuffer.array(), charBuffer.position(), charBuffer.limit()); + Arrays.fill(charBuffer.array(), (char) 0); // clear sensitive data + } else { + final int length = charBuffer.limit() - charBuffer.position(); + chars = new char[length]; + charBuffer.get(chars); + // if the buffer is not read only we can reset and fill with 0's + if (charBuffer.isReadOnly() == false) { + charBuffer.clear(); // reset + for (int i = 0; i < charBuffer.limit(); i++) { + charBuffer.put((char) 0); + } + } + } + return chars; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java new file mode 100644 index 0000000000000..b5f2f73e56f51 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
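The pair of encoding helpers above exists so that the keystore password can be put on the wire as UTF-8 bytes and scrubbed afterwards without ever being materialized as a java.lang.String. The following stand-alone sketch (JDK classes only, simplified to array-backed buffers, which the helpers above do not assume) shows the same kind of round trip:

    import java.nio.ByteBuffer;
    import java.nio.CharBuffer;
    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class PasswordRoundTrip {
        public static void main(String[] args) {
            final char[] password = {'s', '3', 'c', 'r', '3', 't'};

            // encode to UTF-8 without going through String
            final ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(CharBuffer.wrap(password));
            final byte[] wireBytes = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.position(), byteBuffer.limit());
            Arrays.fill(byteBuffer.array(), (byte) 0); // scrub the intermediate buffer

            // ... wireBytes is what writeTo() would hand to the StreamOutput ...

            // decode back to char[] on the receiving side, again without a String
            final CharBuffer charBuffer = StandardCharsets.UTF_8.decode(ByteBuffer.wrap(wireBytes));
            final char[] decoded = Arrays.copyOfRange(charBuffer.array(), charBuffer.position(), charBuffer.limit());
            Arrays.fill(charBuffer.array(), (char) 0); // scrub the intermediate buffer

            System.out.println(Arrays.equals(password, decoded)); // prints: true

            // finally clear every copy that held the secret
            Arrays.fill(wireBytes, (byte) 0);
            Arrays.fill(decoded, (char) 0);
        }
    }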
+ */ + +package org.elasticsearch.action.admin.cluster.node.reload; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Objects; + +/** + * Builder for the reload secure settings nodes request + */ +public class NodesReloadSecureSettingsRequestBuilder extends NodesOperationRequestBuilder { + + public static final String SECURE_SETTINGS_PASSWORD_FIELD_NAME = "secure_settings_password"; + + public NodesReloadSecureSettingsRequestBuilder(ElasticsearchClient client, NodesReloadSecureSettingsAction action) { + super(client, action, new NodesReloadSecureSettingsRequest()); + } + + public NodesReloadSecureSettingsRequestBuilder setSecureStorePassword(SecureString secureStorePassword) { + request.secureStorePassword(secureStorePassword); + return this; + } + + public NodesReloadSecureSettingsRequestBuilder source(BytesReference source, XContentType xContentType) throws IOException { + Objects.requireNonNull(xContentType); + // EMPTY is ok here because we never call namedObject + try (InputStream stream = source.streamInput(); + XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, stream)) { + XContentParser.Token token; + token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("expected an object, but found token [{}]", token); + } + token = parser.nextToken(); + if (token != XContentParser.Token.FIELD_NAME || false == SECURE_SETTINGS_PASSWORD_FIELD_NAME.equals(parser.currentName())) { + throw new ElasticsearchParseException("expected a field named [{}], but found [{}]", SECURE_SETTINGS_PASSWORD_FIELD_NAME, + token); + } + token = parser.nextToken(); + if (token != XContentParser.Token.VALUE_STRING) { + throw new ElasticsearchParseException("expected field [{}] to be of type string, but found [{}] instead", + SECURE_SETTINGS_PASSWORD_FIELD_NAME, token); + } + final String password = parser.text(); + setSecureStorePassword(new SecureString(password.toCharArray())); + token = parser.nextToken(); + if (token != XContentParser.Token.END_OBJECT) { + throw new ElasticsearchParseException("expected end of object, but found token [{}]", token); + } + } + return this; + } + +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java new file mode 100644 index 0000000000000..394b1f10dc2d9 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java @@ -0,0 +1,149 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.node.reload; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import java.io.IOException; +import java.util.List; + +/** + * The response for the reload secure settings action + */ +public class NodesReloadSecureSettingsResponse extends BaseNodesResponse + implements ToXContentFragment { + + public NodesReloadSecureSettingsResponse() { + } + + public NodesReloadSecureSettingsResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(NodeResponse::readNodeResponse); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("nodes"); + for (final NodesReloadSecureSettingsResponse.NodeResponse node : getNodes()) { + builder.startObject(node.getNode().getId()); + builder.field("name", node.getNode().getName()); + final Exception e = node.reloadException(); + if (e != null) { + builder.startObject("reload_exception"); + ElasticsearchException.generateThrowableXContent(builder, params, e); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public String toString() { + try { + final XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + return Strings.toString(builder); + } catch (final IOException e) { + return "{ \"error\" : \"" + e.getMessage() + "\"}"; + } + } + + public static class NodeResponse extends BaseNodeResponse { + + private Exception reloadException = null; + + public NodeResponse() { + } + + public NodeResponse(DiscoveryNode node, Exception reloadException) { + super(node); + this.reloadException = reloadException; + } + + public Exception reloadException() { + return this.reloadException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + if (in.readBoolean()) { + reloadException = in.readException(); + } + } + + @Override + public void writeTo(StreamOutput 
out) throws IOException { + super.writeTo(out); + if (reloadException != null) { + out.writeBoolean(true); + out.writeException(reloadException); + } else { + out.writeBoolean(false); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final NodesReloadSecureSettingsResponse.NodeResponse that = (NodesReloadSecureSettingsResponse.NodeResponse) o; + return reloadException != null ? reloadException.equals(that.reloadException) : that.reloadException == null; + } + + @Override + public int hashCode() { + return reloadException != null ? reloadException.hashCode() : 0; + } + + public static NodeResponse readNodeResponse(StreamInput in) throws IOException { + final NodeResponse node = new NodeResponse(); + node.readFrom(in); + return node; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java new file mode 100644 index 0000000000000..cb870e58d3187 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.reload; + +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.ReloadablePlugin; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class TransportNodesReloadSecureSettingsAction extends TransportNodesAction { + + private final Environment environment; + private final PluginsService pluginsService; + + @Inject + public TransportNodesReloadSecureSettingsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, Environment environment, + PluginsService pluginService) { + super(settings, NodesReloadSecureSettingsAction.NAME, threadPool, clusterService, transportService, actionFilters, + indexNameExpressionResolver, NodesReloadSecureSettingsRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, + NodesReloadSecureSettingsResponse.NodeResponse.class); + this.environment = environment; + this.pluginsService = pluginService; + } + + @Override + protected NodesReloadSecureSettingsResponse newResponse(NodesReloadSecureSettingsRequest request, + List responses, + List failures) { + return new NodesReloadSecureSettingsResponse(clusterService.getClusterName(), responses, failures); + } + + @Override + protected NodeRequest newNodeRequest(String nodeId, NodesReloadSecureSettingsRequest request) { + return new NodeRequest(nodeId, request); + } + + @Override + protected NodesReloadSecureSettingsResponse.NodeResponse newNodeResponse() { + return new NodesReloadSecureSettingsResponse.NodeResponse(); + } + + @Override + protected NodesReloadSecureSettingsResponse.NodeResponse nodeOperation(NodeRequest nodeReloadRequest) { + final NodesReloadSecureSettingsRequest request = nodeReloadRequest.request; + final SecureString secureSettingsPassword = request.secureSettingsPassword(); + try (KeyStoreWrapper keystore = KeyStoreWrapper.load(environment.configFile())) { + // reread keystore from config file + if (keystore == null) { + return new NodesReloadSecureSettingsResponse.NodeResponse(clusterService.localNode(), + new IllegalStateException("Keystore is missing")); + } + // decrypt the keystore using the password from the request + keystore.decrypt(secureSettingsPassword.getChars()); + // add the keystore to the original node settings object + final Settings settingsWithKeystore = Settings.builder() + .put(environment.settings(), false) + 
.setSecureSettings(keystore) + .build(); + final List exceptions = new ArrayList<>(); + // broadcast the new settings object (with the open embedded keystore) to all reloadable plugins + pluginsService.filterPlugins(ReloadablePlugin.class).stream().forEach(p -> { + try { + p.reload(settingsWithKeystore); + } catch (final Exception e) { + logger.warn((Supplier) () -> new ParameterizedMessage("Reload failed for plugin [{}]", p.getClass().getSimpleName()), + e); + exceptions.add(e); + } + }); + ExceptionsHelper.rethrowAndSuppress(exceptions); + return new NodesReloadSecureSettingsResponse.NodeResponse(clusterService.localNode(), null); + } catch (final Exception e) { + return new NodesReloadSecureSettingsResponse.NodeResponse(clusterService.localNode(), e); + } + } + + public static class NodeRequest extends BaseNodeRequest { + + NodesReloadSecureSettingsRequest request; + + public NodeRequest() { + } + + NodeRequest(String nodeId, NodesReloadSecureSettingsRequest request) { + super(nodeId); + this.request = request; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + request = new NodesReloadSecureSettingsRequest(); + request.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + request.writeTo(out); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java index 5b21036b8cd4f..949b0110fff20 100644 --- a/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java @@ -33,6 +33,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -185,6 +186,11 @@ public interface ClusterAdminClient extends ElasticsearchClient { */ ClusterUpdateSettingsRequestBuilder prepareUpdateSettings(); + /** + * Re initialize each cluster node and pass them the secret store password. + */ + NodesReloadSecureSettingsRequestBuilder prepareReloadSecureSettings(); + /** * Reroutes allocation of shards. Advance API. 
*/ diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 41c1d245d39ca..dc70da4e61f7e 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -41,6 +41,8 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsAction; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder; @@ -771,6 +773,11 @@ public ClusterUpdateSettingsRequestBuilder prepareUpdateSettings() { return new ClusterUpdateSettingsRequestBuilder(this, ClusterUpdateSettingsAction.INSTANCE); } + @Override + public NodesReloadSecureSettingsRequestBuilder prepareReloadSecureSettings() { + return new NodesReloadSecureSettingsRequestBuilder(this, NodesReloadSecureSettingsAction.INSTANCE); + } + @Override public ActionFuture nodesInfo(final NodesInfoRequest request) { return execute(NodesInfoAction.INSTANCE, request); diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index f47760491f8d5..3a8a06949b29c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -308,7 +308,9 @@ public void decrypt(char[] password) throws GeneralSecurityException, IOExceptio } if (formatVersion <= 2) { decryptLegacyEntries(); - assert password.length == 0; + if (password.length != 0) { + throw new IllegalArgumentException("Keystore format does not accept non-empty passwords"); + } return; } diff --git a/server/src/main/java/org/elasticsearch/common/util/LazyInitializable.java b/server/src/main/java/org/elasticsearch/common/util/LazyInitializable.java new file mode 100644 index 0000000000000..ad3a3bcc299d0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/LazyInitializable.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
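With the action, the request builder, and the ClusterAdminClient/AbstractClient hooks above in place, a reload can be driven from Java client code; over HTTP the same operation is exposed later in this patch as POST /_nodes/reload_secure_settings with a body of the form {"secure_settings_password": "..."}. A rough Java-client sketch follows; the surrounding class and the error handling are illustrative, not part of this change, and the empty password reflects the KeyStoreWrapper check above, which only accepts an empty password for the current format versions:

    import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.settings.SecureString;

    public class ReloadSecureSettingsExample {

        // assumes the caller already holds a Client (for example inside a plugin or an integration test)
        static void reloadOnAllNodes(Client client) {
            final NodesReloadSecureSettingsResponse response = client.admin().cluster()
                    .prepareReloadSecureSettings()
                    .setSecureStorePassword(new SecureString(new char[0])) // current keystore formats take an empty password
                    .get();
            for (final NodesReloadSecureSettingsResponse.NodeResponse node : response.getNodes()) {
                if (node.reloadException() != null) {
                    // one failing node does not stop the reload on the others; surface the failure to the caller
                    throw new RuntimeException("reload failed on node [" + node.getNode().getName() + "]",
                            node.reloadException());
                }
            }
        }
    }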
+ */ + +package org.elasticsearch.common.util; + +import org.elasticsearch.common.CheckedSupplier; + +import java.util.Objects; +import java.util.function.Consumer; + +/** + * Encapsulates a {@link CheckedSupplier} which is lazily invoked once on the + * first call to {@code #getOrCompute()}. The value which the + * supplier returns is memorized and will be served until + * {@code #reset()} is called. Each value returned by {@code #getOrCompute()}, + * newly minted or cached, will be passed to the onGet + * {@link Consumer}. On {@code #reset()} the value will be passed to the + * onReset {@code Consumer} and the next {@code #getOrCompute()} + * will regenerate the value. + */ +public final class LazyInitializable { + + private final CheckedSupplier supplier; + private final Consumer onGet; + private final Consumer onReset; + private volatile T value; + + /** + * Creates the simple LazyInitializable instance. + * + * @param supplier + * The {@code CheckedSupplier} to generate values which will be + * served on {@code #getOrCompute()} invocations. + */ + public LazyInitializable(CheckedSupplier supplier) { + this(supplier, v -> {}, v -> {}); + } + + /** + * Creates the complete LazyInitializable instance. + * + * @param supplier + * The {@code CheckedSupplier} to generate values which will be + * served on {@code #getOrCompute()} invocations. + * @param onGet + * A {@code Consumer} which is called on each value, newly forged or + * stale, that is returned by {@code #getOrCompute()} + * @param onReset + * A {@code Consumer} which is invoked on the value that will be + * erased when calling {@code #reset()} + */ + public LazyInitializable(CheckedSupplier supplier, Consumer onGet, Consumer onReset) { + this.supplier = supplier; + this.onGet = onGet; + this.onReset = onReset; + } + + /** + * Returns a value that was created by supplier. The value might + * have been previously created, if not it will be created now, thread safe of + * course. + */ + public T getOrCompute() throws E { + final T readOnce = value; // Read volatile just once... + final T result = readOnce == null ? maybeCompute(supplier) : readOnce; + onGet.accept(result); + return result; + } + + /** + * Clears the value, if it has been previously created by calling + * {@code #getOrCompute()}. The onReset will be called on this + * value. The next call to {@code #getOrCompute()} will recreate the value. + */ + public synchronized void reset() { + if (value != null) { + onReset.accept(value); + value = null; + } + } + + /** + * Creates a new value thread safely. + */ + private synchronized T maybeCompute(CheckedSupplier supplier) throws E { + if (value == null) { + value = Objects.requireNonNull(supplier.get()); + } + return value; + } + +} diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index 0ef703448b799..65d47682a95c0 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -74,6 +74,7 @@ *
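A short usage sketch for the LazyInitializable helper above; the ExpensiveClient type, the holder class and the factory method are invented for illustration, and only the LazyInitializable calls themselves come from the class above:

    import java.io.Closeable;
    import java.io.IOException;

    import org.elasticsearch.common.util.LazyInitializable;

    public class LazyClientHolder {

        // stands in for any resource that is costly to create
        interface ExpensiveClient extends Closeable {
            void ping();
        }

        private final LazyInitializable<ExpensiveClient, IOException> lazyClient = new LazyInitializable<>(
                LazyClientHolder::openClient,                 // supplier: runs once, on the first getOrCompute()
                client -> { /* onGet: e.g. increment a reference count */ },
                client -> {                                   // onReset: dispose of the value being dropped
                    try {
                        client.close();
                    } catch (final IOException e) {
                        // best effort on teardown
                    }
                });

        void use() throws IOException {
            lazyClient.getOrCompute().ping();                 // created lazily on first use, then cached
        }

        void onSettingsChanged() {
            lazyClient.reset();                               // next getOrCompute() rebuilds the client
        }

        private static ExpensiveClient openClient() throws IOException {
            // hypothetical factory; a real one would read (secure) settings here
            return new ExpensiveClient() {
                @Override
                public void ping() {
                }

                @Override
                public void close() {
                }
            };
        }
    }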
 *  • {@link RepositoryPlugin}
 *  • {@link ScriptPlugin}
 *  • {@link SearchPlugin}
+ *  • {@link ReloadablePlugin}
 *
    In addition to extension points this class also declares some {@code @Deprecated} {@code public final void onModule} methods. These * methods should cause any extensions of {@linkplain Plugin} that used the pre-5.x style extension syntax to fail to build and point the diff --git a/server/src/main/java/org/elasticsearch/plugins/ReloadablePlugin.java b/server/src/main/java/org/elasticsearch/plugins/ReloadablePlugin.java new file mode 100644 index 0000000000000..86d7759185e69 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/ReloadablePlugin.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugins; + +import org.elasticsearch.common.settings.Settings; + +/** + * An extension point for {@link Plugin}s that can be reloaded. There is no + * clear definition about what reloading a plugin actually means. When a plugin + * is reloaded it might rebuild any internal members. Plugins usually implement + * this interface in order to reread the values of {@code SecureSetting}s and + * then rebuild any dependent internal members. + */ +public interface ReloadablePlugin { + /** + * Called to trigger the rebuilt of the plugin's internal members. The reload + * operation is required to have been completed when the method returns. + * Strictly speaking, the settings argument should not be accessed + * outside of this method's call stack, as any values stored in the node's + * keystore (see {@code SecureSetting}) will not otherwise be retrievable. The + * setting values do not follow dynamic updates, i.e. the values are identical + * to the ones during the initial plugin loading, barring the keystore file on + * disk changes. Any failure during the operation should be signaled by raising + * an exception, but the plugin should otherwise continue to function + * unperturbed. + * + * @param settings + * Settings used while reloading the plugin. All values are + * retrievable, including the values stored in the node's keystore. + * The setting values are the initial ones, from when the node has be + * started, i.e. they don't follow dynamic updates. + * @throws Exception + * if the operation failed. The plugin should continue to operate as + * if the offending call didn't happen. 
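To make the contract described above concrete, a minimal implementation could look like the sketch below; the class name is made up, and the pattern mirrors the MockReloadablePlugin used by the integration test later in this patch:

    import org.elasticsearch.common.settings.KeyStoreWrapper;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.plugins.ReloadablePlugin;

    public class ExampleReloadablePlugin extends Plugin implements ReloadablePlugin {

        // state derived from a secure setting; rebuilt on every reload
        private volatile String cachedSeed;

        @Override
        public void reload(Settings settings) throws Exception {
            // `settings` carries the freshly re-read and decrypted keystore, so secure settings are readable here
            this.cachedSeed = KeyStoreWrapper.SEED_SETTING.get(settings).toString();
            // ... rebuild any clients or other members that depend on secure settings ...
        }
    }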
+ */ + void reload(Settings settings) throws Exception; +} \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java new file mode 100644 index 0000000000000..0697871ea5d1c --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action.admin.cluster; + +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequest; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequestBuilder; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestActions; +import org.elasticsearch.rest.action.RestBuilderListener; + +import java.io.IOException; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public final class RestReloadSecureSettingsAction extends BaseRestHandler { + + public RestReloadSecureSettingsAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(POST, "/_nodes/reload_secure_settings", this); + controller.registerHandler(POST, "/_nodes/{nodeId}/reload_secure_settings", this); + } + + @Override + public String getName() { + return "nodes_reload_action"; + } + + @Override + public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + final String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId")); + final NodesReloadSecureSettingsRequestBuilder nodesRequestBuilder = client.admin() + .cluster() + .prepareReloadSecureSettings() + .setTimeout(request.param("timeout")) + .source(request.requiredContent(), request.getXContentType()) + .setNodesIds(nodesIds); + final NodesReloadSecureSettingsRequest nodesRequest = nodesRequestBuilder.request(); + return channel -> nodesRequestBuilder + .execute(new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(NodesReloadSecureSettingsResponse response, XContentBuilder builder) + throws Exception { + 
builder.startObject(); + RestActions.buildNodesHeader(builder, channel.request(), response); + builder.field("cluster_name", response.getClusterName().value()); + response.toXContent(builder, channel.request()); + builder.endObject(); + // clear password for the original request + nodesRequest.secureSettingsPassword().close(); + return new BytesRestResponse(RestStatus.OK, builder); + } + }); + } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } + +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java new file mode 100644 index 0000000000000..2061349e3301d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -0,0 +1,422 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse; +import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.common.settings.SecureSettings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.ReloadablePlugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.StandardCopyOption; +import java.security.AccessControlException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.containsString; + +public class ReloadSecureSettingsIT extends ESIntegTestCase { + + public void testMissingKeystoreFile() throws Exception { + final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + final Environment environment = internalCluster().getInstance(Environment.class); + final AtomicReference reloadSettingsError = new AtomicReference<>(); + // keystore file should be 
missing for this test case + Files.deleteIfExists(KeyStoreWrapper.keystorePath(environment.configFile())); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + final CountDownLatch latch = new CountDownLatch(1); + client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), notNullValue()); + assertThat(nodeResponse.reloadException(), instanceOf(IllegalStateException.class)); + assertThat(nodeResponse.reloadException().getMessage(), containsString("Keystore is missing")); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + // in the missing keystore case no reload should be triggered + assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); + } + + public void testNullKeystorePassword() throws Exception { + final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + final CountDownLatch latch = new CountDownLatch(1); + client().admin().cluster().prepareReloadSecureSettings().execute( + new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + reloadSettingsError.set(new AssertionError("Null keystore password should fail")); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + try { + assertThat(e, instanceOf(ActionRequestValidationException.class)); + assertThat(e.getMessage(), containsString("secure settings password cannot be null")); + } catch (final AssertionError ae) { + reloadSettingsError.set(ae); + } finally { + latch.countDown(); + } + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + // in the null password case no reload should be triggered + assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); + } + + public void testInvalidKeystoreFile() throws Exception { + final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + final Environment environment = internalCluster().getInstance(Environment.class); + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + // invalid "keystore" file should be present in the config dir + try (InputStream keystore = 
ReloadSecureSettingsIT.class.getResourceAsStream("invalid.txt.keystore")) { + if (Files.exists(environment.configFile()) == false) { + Files.createDirectory(environment.configFile()); + } + Files.copy(keystore, KeyStoreWrapper.keystorePath(environment.configFile()), StandardCopyOption.REPLACE_EXISTING); + } + final CountDownLatch latch = new CountDownLatch(1); + client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), notNullValue()); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + // in the invalid keystore format case no reload should be triggered + assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); + } + + public void testWrongKeystorePassword() throws Exception { + final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + final Environment environment = internalCluster().getInstance(Environment.class); + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + // "some" keystore should be present in this case + writeEmptyKeystore(environment, new char[0]); + final CountDownLatch latch = new CountDownLatch(1); + client().admin() + .cluster() + .prepareReloadSecureSettings() + .setSecureStorePassword(new SecureString(new char[] { 'W', 'r', 'o', 'n', 'g' })) + .execute(new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), notNullValue()); + assertThat(nodeResponse.reloadException(), instanceOf(IOException.class)); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + // in the wrong password case no reload should be triggered + assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); + } + + public void testMisbehavingPlugin() throws Exception { + final Environment environment = internalCluster().getInstance(Environment.class); + final PluginsService pluginsService 
= internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + // make plugins throw on reload + for (final String nodeName : internalCluster().getNodeNames()) { + internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(MisbehavingReloadablePlugin.class) + .stream().findFirst().get().setShouldThrow(true); + } + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + // "some" keystore should be present + final SecureSettings secureSettings = writeEmptyKeystore(environment, new char[0]); + // read seed setting value from the test case (not from the node) + final String seedValue = KeyStoreWrapper.SEED_SETTING + .get(Settings.builder().put(environment.settings()).setSecureSettings(secureSettings).build()) + .toString(); + final CountDownLatch latch = new CountDownLatch(1); + client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), notNullValue()); + assertThat(nodeResponse.reloadException().getMessage(), containsString("If shouldThrow I throw")); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + // even if one plugin fails to reload (throws Exception), others should be + // unperturbed + assertThat(mockReloadablePlugin.getReloadCount() - initialReloadCount, equalTo(1)); + // mock plugin should have been reloaded successfully + assertThat(mockReloadablePlugin.getSeedValue(), equalTo(seedValue)); + } + + public void testReloadWhileKeystoreChanged() throws Exception { + final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + final Environment environment = internalCluster().getInstance(Environment.class); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + for (int i = 0; i < randomIntBetween(4, 8); i++) { + // write keystore + final SecureSettings secureSettings = writeEmptyKeystore(environment, new char[0]); + // read seed setting value from the test case (not from the node) + final String seedValue = KeyStoreWrapper.SEED_SETTING + .get(Settings.builder().put(environment.settings()).setSecureSettings(secureSettings).build()) + .toString(); + // reload call + successfulReloadCall(); + assertThat(mockReloadablePlugin.getSeedValue(), equalTo(seedValue)); + assertThat(mockReloadablePlugin.getReloadCount() - initialReloadCount, equalTo(i + 1)); + } + } + + @Override + protected Collection> nodePlugins() { + final List> plugins = 
Arrays.asList(MockReloadablePlugin.class, MisbehavingReloadablePlugin.class); + // shuffle as reload is called in order + Collections.shuffle(plugins, random()); + return plugins; + } + + private void successfulReloadCall() throws InterruptedException { + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), nullValue()); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + } + + private SecureSettings writeEmptyKeystore(Environment environment, char[] password) throws Exception { + final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create(); + try { + keyStoreWrapper.save(environment.configFile(), password); + } catch (final AccessControlException e) { + if (e.getPermission() instanceof RuntimePermission && e.getPermission().getName().equals("accessUserInformation")) { + // this is expected: the save method is extra diligent and wants to make sure + // the keystore is readable, not relying on umask and whatnot. It's ok, we don't + // care about this in tests. 
+ } else { + throw e; + } + } + return keyStoreWrapper; + } + + public static class CountingReloadablePlugin extends Plugin implements ReloadablePlugin { + + private volatile int reloadCount; + + public CountingReloadablePlugin() { + } + + @Override + public void reload(Settings settings) throws Exception { + reloadCount++; + } + + public int getReloadCount() { + return reloadCount; + } + + } + + public static class MockReloadablePlugin extends CountingReloadablePlugin { + + private volatile String seedValue; + + public MockReloadablePlugin() { + } + + @Override + public void reload(Settings settings) throws Exception { + super.reload(settings); + this.seedValue = KeyStoreWrapper.SEED_SETTING.get(settings).toString(); + } + + public String getSeedValue() { + return seedValue; + } + + } + + public static class MisbehavingReloadablePlugin extends CountingReloadablePlugin { + + private boolean shouldThrow = false; + + public MisbehavingReloadablePlugin() { + } + + @Override + public synchronized void reload(Settings settings) throws Exception { + super.reload(settings); + if (shouldThrow) { + shouldThrow = false; + throw new Exception("If shouldThrow I throw"); + } + } + + public synchronized void setShouldThrow(boolean shouldThrow) { + this.shouldThrow = shouldThrow; + } + } + +} diff --git a/server/src/test/resources/org/elasticsearch/action/admin/invalid.txt.keystore b/server/src/test/resources/org/elasticsearch/action/admin/invalid.txt.keystore new file mode 100644 index 0000000000000..04613ffab7f36 --- /dev/null +++ b/server/src/test/resources/org/elasticsearch/action/admin/invalid.txt.keystore @@ -0,0 +1,3 @@ +admin admin +dragon 12345 + From 8c0ec05a1225c76a4ae9e6592ac38e63e5ed4dc8 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Mon, 18 Jun 2018 09:46:12 +0100 Subject: [PATCH 11/92] Expose lucene's RemoveDuplicatesTokenFilter (#31275) --- docs/reference/analysis/tokenfilters.asciidoc | 4 +- .../remove-duplicates-tokenfilter.asciidoc | 5 ++ .../analysis/common/CommonAnalysisPlugin.java | 1 + .../RemoveDuplicatesTokenFilterFactory.java | 42 +++++++++++++ .../RemoveDuplicatesFilterFactoryTests.java | 61 +++++++++++++++++++ 5 files changed, 112 insertions(+), 1 deletion(-) create mode 100644 docs/reference/analysis/tokenfilters/remove-duplicates-tokenfilter.asciidoc create mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RemoveDuplicatesTokenFilterFactory.java create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java diff --git a/docs/reference/analysis/tokenfilters.asciidoc b/docs/reference/analysis/tokenfilters.asciidoc index 6e77b4498650d..dd5cb2e702cff 100644 --- a/docs/reference/analysis/tokenfilters.asciidoc +++ b/docs/reference/analysis/tokenfilters.asciidoc @@ -95,4 +95,6 @@ include::tokenfilters/decimal-digit-tokenfilter.asciidoc[] include::tokenfilters/fingerprint-tokenfilter.asciidoc[] -include::tokenfilters/minhash-tokenfilter.asciidoc[] \ No newline at end of file +include::tokenfilters/minhash-tokenfilter.asciidoc[] + +include::tokenfilters/remove-duplicates-tokenfilter.asciidoc[] \ No newline at end of file diff --git a/docs/reference/analysis/tokenfilters/remove-duplicates-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/remove-duplicates-tokenfilter.asciidoc new file mode 100644 index 0000000000000..594e18eaf7f7e --- /dev/null +++ b/docs/reference/analysis/tokenfilters/remove-duplicates-tokenfilter.asciidoc @@ -0,0 +1,5 @@ 
+[[analysis-remove-duplicates-tokenfilter]] +=== Remove Duplicates Token Filter + +A token filter of type `remove_duplicates` that drops identical tokens at the +same position. diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 722d75a9293f7..04df77245438c 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -194,6 +194,7 @@ public Map> getTokenFilters() { filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new)); filters.put("persian_normalization", PersianNormalizationFilterFactory::new); filters.put("porter_stem", PorterStemTokenFilterFactory::new); + filters.put("remove_duplicates", RemoveDuplicatesTokenFilterFactory::new); filters.put("reverse", ReverseTokenFilterFactory::new); filters.put("russian_stem", RussianStemTokenFilterFactory::new); filters.put("scandinavian_folding", ScandinavianFoldingFilterFactory::new); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RemoveDuplicatesTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RemoveDuplicatesTokenFilterFactory.java new file mode 100644 index 0000000000000..a136c5573121e --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RemoveDuplicatesTokenFilterFactory.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; + +/** + * Filter factory for the lucene RemoveDuplicatesTokenFilter + */ +class RemoveDuplicatesTokenFilterFactory extends AbstractTokenFilterFactory { + + RemoveDuplicatesTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { + super(indexSettings, name, settings); + } + + @Override + public TokenStream create(TokenStream tokenStream) { + return new RemoveDuplicatesTokenFilter(tokenStream); + } +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java new file mode 100644 index 0000000000000..8180985416f52 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.CannedTokenStream; +import org.apache.lucene.analysis.Token; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.analysis.AnalysisTestsHelper; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTokenStreamTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.instanceOf; + +public class RemoveDuplicatesFilterFactoryTests extends ESTokenStreamTestCase { + + public void testRemoveDuplicatesFilter() throws IOException { + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.removedups.type", "remove_duplicates") + .build(); + ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("removedups"); + assertThat(tokenFilter, instanceOf(RemoveDuplicatesTokenFilterFactory.class)); + + CannedTokenStream cts = new CannedTokenStream( + new Token("a", 1, 0, 1), + new Token("b", 1, 2, 3), + new Token("c", 0, 2, 3), + new Token("b", 0, 2, 3), + new Token("d", 1, 4, 5) + ); + + assertTokenStreamContents(tokenFilter.create(cts), new String[]{ + "a", "b", "c", "d" + }, new int[]{ + 1, 1, 0, 1 + }); + } + +} From f5135050526d5ba3febcb0b43a7dd708d49a4c0f Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 18 Jun 2018 10:49:22 +0200 Subject: [PATCH 12/92] [Test] Fix :example-plugins:rest-handler on Windows --- plugins/examples/rest-handler/build.gradle | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/plugins/examples/rest-handler/build.gradle b/plugins/examples/rest-handler/build.gradle index 2c55c3c79fce7..cfe84e6a45a93 100644 --- a/plugins/examples/rest-handler/build.gradle +++ b/plugins/examples/rest-handler/build.gradle @@ -30,10 +30,9 @@ test.enabled = false task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) { dependsOn testClasses + env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" executable = new File(project.runtimeJavaHome, 'bin/java') - args '-cp', "${ -> project.sourceSets.test.runtimeClasspath.asPath }", - 'org.elasticsearch.example.resthandler.ExampleFixture', - baseDir, 'TEST' + args 'org.elasticsearch.example.resthandler.ExampleFixture', baseDir, 'TEST' } integTestCluster { From 47095357bc1ae0df9a19ea328a64745b11aa2e08 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 18 Jun 2018 11:24:43 +0200 Subject: [PATCH 13/92] Move language analyzers from server to analysis-common module. (#31300) The following analyzers were moved from server module to analysis-common module: `greek`, `hindi`, `hungarian`, `indonesian`, `irish`, `italian`, `latvian`, `lithuanian`, `norwegian`, `persian`, `portuguese`, `romanian`, `russian`, `sorani`, `spanish`, `swedish`, `turkish` and `thai`. 
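In effect, each of these analyzers is now contributed through the `AnalysisPlugin` extension points of the analysis-common module rather than being registered directly in `AnalysisModule`. The sketch below is an editorial condensation of the `CommonAnalysisPlugin` changes in this patch, showing the registration pattern for a single analyzer (`greek`); it is illustrative only and not part of the patch itself.

    // Condensed, illustrative sketch of the registration pattern used by
    // CommonAnalysisPlugin after this change; the real class registers every
    // analyzer listed above in the same way.
    package org.elasticsearch.analysis.common;

    import java.util.Collections;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.el.GreekAnalyzer;
    import org.elasticsearch.index.analysis.AnalyzerProvider;
    import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory;
    import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
    import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy;
    import org.elasticsearch.plugins.AnalysisPlugin;
    import org.elasticsearch.plugins.Plugin;

    public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {

        @Override
        public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
            Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> analyzers = new TreeMap<>();
            // index-level provider: constructed per index so it can honour
            // per-index settings such as custom stopwords
            analyzers.put("greek", GreekAnalyzerProvider::new);
            return analyzers;
        }

        @Override
        public List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
            // pre-built analyzer: a shared default instance, cached according to
            // the given strategy (here one instance per Lucene version)
            return Collections.singletonList(
                new PreBuiltAnalyzerProviderFactory("greek", CachingStrategy.LUCENE, GreekAnalyzer::new));
        }
    }

The two extension points cover the two ways an analyzer is consumed: `getAnalyzers` supplies settings-aware providers for per-index configuration, while `getPreBuiltAnalyzerProviderFactories` supplies shared default instances whose caching strategy controls how instances are reused across indices and Lucene versions.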
Relates to #23658 --- .../analysis/common/CommonAnalysisPlugin.java | 62 ++- .../common}/GreekAnalyzerProvider.java | 6 +- .../common}/HindiAnalyzerProvider.java | 6 +- .../common}/HungarianAnalyzerProvider.java | 6 +- .../common}/IndonesianAnalyzerProvider.java | 6 +- .../common}/IrishAnalyzerProvider.java | 6 +- .../common}/ItalianAnalyzerProvider.java | 6 +- .../common}/LatvianAnalyzerProvider.java | 6 +- .../common}/LithuanianAnalyzerProvider.java | 6 +- .../common}/NorwegianAnalyzerProvider.java | 6 +- .../common}/PersianAnalyzerProvider.java | 6 +- .../common}/PortugueseAnalyzerProvider.java | 6 +- .../common}/RomanianAnalyzerProvider.java | 6 +- .../common}/RussianAnalyzerProvider.java | 6 +- .../common}/SoraniAnalyzerProvider.java | 6 +- .../common}/SpanishAnalyzerProvider.java | 6 +- .../common}/SwedishAnalyzerProvider.java | 6 +- .../common}/ThaiAnalyzerProvider.java | 6 +- .../common}/TurkishAnalyzerProvider.java | 6 +- .../test/analysis-common/20_analyzers.yml | 522 ++++++++++++++++++ .../indices/analysis/AnalysisModule.java | 36 -- .../indices/analysis/PreBuiltAnalyzers.java | 180 ------ .../indices/analysis/AnalysisModuleTests.java | 2 +- 23 files changed, 655 insertions(+), 255 deletions(-) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/GreekAnalyzerProvider.java (84%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/HindiAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/HungarianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/IndonesianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/IrishAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/ItalianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/LatvianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/LithuanianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/NorwegianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/PersianAnalyzerProvider.java (83%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/PortugueseAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/RomanianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/RussianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => 
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/SoraniAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/SpanishAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/SwedishAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/ThaiAnalyzerProvider.java (83%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/TurkishAnalyzerProvider.java (85%) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 04df77245438c..cdd8101a73c70 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -37,6 +37,7 @@ import org.apache.lucene.analysis.cjk.CJKAnalyzer; import org.apache.lucene.analysis.cjk.CJKBigramFilter; import org.apache.lucene.analysis.cjk.CJKWidthFilter; +import org.apache.lucene.analysis.ckb.SoraniAnalyzer; import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter; import org.apache.lucene.analysis.commongrams.CommonGramsFilter; import org.apache.lucene.analysis.core.DecimalDigitFilter; @@ -52,17 +53,27 @@ import org.apache.lucene.analysis.de.GermanAnalyzer; import org.apache.lucene.analysis.de.GermanNormalizationFilter; import org.apache.lucene.analysis.de.GermanStemFilter; +import org.apache.lucene.analysis.el.GreekAnalyzer; import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.en.KStemFilter; import org.apache.lucene.analysis.en.PorterStemFilter; +import org.apache.lucene.analysis.es.SpanishAnalyzer; import org.apache.lucene.analysis.eu.BasqueAnalyzer; +import org.apache.lucene.analysis.fa.PersianAnalyzer; import org.apache.lucene.analysis.fa.PersianNormalizationFilter; import org.apache.lucene.analysis.fi.FinnishAnalyzer; import org.apache.lucene.analysis.fr.FrenchAnalyzer; +import org.apache.lucene.analysis.ga.IrishAnalyzer; import org.apache.lucene.analysis.gl.GalicianAnalyzer; +import org.apache.lucene.analysis.hi.HindiAnalyzer; import org.apache.lucene.analysis.hi.HindiNormalizationFilter; +import org.apache.lucene.analysis.hu.HungarianAnalyzer; import org.apache.lucene.analysis.hy.ArmenianAnalyzer; +import org.apache.lucene.analysis.id.IndonesianAnalyzer; import org.apache.lucene.analysis.in.IndicNormalizationFilter; +import org.apache.lucene.analysis.it.ItalianAnalyzer; +import org.apache.lucene.analysis.lt.LithuanianAnalyzer; +import org.apache.lucene.analysis.lv.LatvianAnalyzer; import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter; import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute; import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter; @@ -79,19 +90,26 @@ import org.apache.lucene.analysis.ngram.NGramTokenFilter; import org.apache.lucene.analysis.ngram.NGramTokenizer; import org.apache.lucene.analysis.nl.DutchAnalyzer; +import org.apache.lucene.analysis.no.NorwegianAnalyzer; import 
org.apache.lucene.analysis.path.PathHierarchyTokenizer; import org.apache.lucene.analysis.pattern.PatternTokenizer; import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter; import org.apache.lucene.analysis.payloads.TypeAsPayloadTokenFilter; +import org.apache.lucene.analysis.pt.PortugueseAnalyzer; import org.apache.lucene.analysis.reverse.ReverseStringFilter; +import org.apache.lucene.analysis.ro.RomanianAnalyzer; +import org.apache.lucene.analysis.ru.RussianAnalyzer; import org.apache.lucene.analysis.shingle.ShingleFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; import org.apache.lucene.analysis.standard.ClassicFilter; import org.apache.lucene.analysis.standard.ClassicTokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; +import org.apache.lucene.analysis.sv.SwedishAnalyzer; +import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.apache.lucene.analysis.th.ThaiTokenizer; import org.apache.lucene.analysis.tr.ApostropheFilter; +import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.apache.lucene.analysis.util.ElisionFilter; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; @@ -130,6 +148,8 @@ public Map>> getAn analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new); analyzers.put("pattern", PatternAnalyzerProvider::new); analyzers.put("snowball", SnowballAnalyzerProvider::new); + + // Language analyzers: analyzers.put("arabic", ArabicAnalyzerProvider::new); analyzers.put("armenian", ArmenianAnalyzerProvider::new); analyzers.put("basque", BasqueAnalyzerProvider::new); @@ -147,6 +167,24 @@ public Map>> getAn analyzers.put("french", FrenchAnalyzerProvider::new); analyzers.put("galician", GalicianAnalyzerProvider::new); analyzers.put("german", GermanAnalyzerProvider::new); + analyzers.put("greek", GreekAnalyzerProvider::new); + analyzers.put("hindi", HindiAnalyzerProvider::new); + analyzers.put("hungarian", HungarianAnalyzerProvider::new); + analyzers.put("indonesian", IndonesianAnalyzerProvider::new); + analyzers.put("irish", IrishAnalyzerProvider::new); + analyzers.put("italian", ItalianAnalyzerProvider::new); + analyzers.put("latvian", LatvianAnalyzerProvider::new); + analyzers.put("lithuanian", LithuanianAnalyzerProvider::new); + analyzers.put("norwegian", NorwegianAnalyzerProvider::new); + analyzers.put("persian", PersianAnalyzerProvider::new); + analyzers.put("portuguese", PortugueseAnalyzerProvider::new); + analyzers.put("romanian", RomanianAnalyzerProvider::new); + analyzers.put("russian", RussianAnalyzerProvider::new); + analyzers.put("sorani", SoraniAnalyzerProvider::new); + analyzers.put("spanish", SpanishAnalyzerProvider::new); + analyzers.put("swedish", SwedishAnalyzerProvider::new); + analyzers.put("turkish", TurkishAnalyzerProvider::new); + analyzers.put("thai", ThaiAnalyzerProvider::new); return analyzers; } @@ -248,13 +286,15 @@ public Map> getTokenizers() { @Override public List getPreBuiltAnalyzerProviderFactories() { List analyzers = new ArrayList<>(); - analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.LUCENE, + analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.ELASTICSEARCH, () -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET))); analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH, () -> new PatternAnalyzer(Regex.compile("\\W+" 
/*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET))); analyzers.add(new PreBuiltAnalyzerProviderFactory("snowball", CachingStrategy.LUCENE, () -> new SnowballAnalyzer("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET))); + + // Language analyzers: analyzers.add(new PreBuiltAnalyzerProviderFactory("arabic", CachingStrategy.LUCENE, ArabicAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("armenian", CachingStrategy.LUCENE, ArmenianAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("basque", CachingStrategy.LUCENE, BasqueAnalyzer::new)); @@ -263,7 +303,7 @@ public List getPreBuiltAnalyzerProviderFactorie analyzers.add(new PreBuiltAnalyzerProviderFactory("bulgarian", CachingStrategy.LUCENE, BulgarianAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("catalan", CachingStrategy.LUCENE, CatalanAnalyzer::new)); // chinese analyzer: only for old indices, best effort - analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.LUCENE, StandardAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.ONE, StandardAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("cjk", CachingStrategy.LUCENE, CJKAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("czech", CachingStrategy.LUCENE, CzechAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("danish", CachingStrategy.LUCENE, DanishAnalyzer::new)); @@ -273,6 +313,24 @@ public List getPreBuiltAnalyzerProviderFactorie analyzers.add(new PreBuiltAnalyzerProviderFactory("french", CachingStrategy.LUCENE, FrenchAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("galician", CachingStrategy.LUCENE, GalicianAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("german", CachingStrategy.LUCENE, GermanAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("greek", CachingStrategy.LUCENE, GreekAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("hindi", CachingStrategy.LUCENE, HindiAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("hungarian", CachingStrategy.LUCENE, HungarianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("indonesian", CachingStrategy.LUCENE, IndonesianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("irish", CachingStrategy.LUCENE, IrishAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("italian", CachingStrategy.LUCENE, ItalianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("latvian", CachingStrategy.LUCENE, LatvianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("lithuanian", CachingStrategy.LUCENE, LithuanianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("norwegian", CachingStrategy.LUCENE, NorwegianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("persian", CachingStrategy.LUCENE, PersianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("portuguese", CachingStrategy.LUCENE, PortugueseAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("romanian", CachingStrategy.LUCENE, RomanianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("russian", CachingStrategy.LUCENE, RussianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("sorani", CachingStrategy.LUCENE, SoraniAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("spanish", CachingStrategy.LUCENE, SpanishAnalyzer::new)); + 
analyzers.add(new PreBuiltAnalyzerProviderFactory("swedish", CachingStrategy.LUCENE, SwedishAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("turkish", CachingStrategy.LUCENE, TurkishAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("thai", CachingStrategy.LUCENE, ThaiAnalyzer::new)); return analyzers; } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/GreekAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GreekAnalyzerProvider.java similarity index 84% rename from server/src/main/java/org/elasticsearch/index/analysis/GreekAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GreekAnalyzerProvider.java index d14539269b497..2c237f7fe3cd0 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/GreekAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GreekAnalyzerProvider.java @@ -17,18 +17,20 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.el.GreekAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class GreekAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final GreekAnalyzer analyzer; - public GreekAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + GreekAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new GreekAnalyzer( Analysis.parseStopWords(env, settings, GreekAnalyzer.getDefaultStopSet())); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/HindiAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HindiAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/HindiAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HindiAnalyzerProvider.java index 166390df1ba3f..2f4d50c4c76b4 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/HindiAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HindiAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.hi.HindiAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class HindiAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final HindiAnalyzer analyzer; - public HindiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + HindiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new HindiAnalyzer( Analysis.parseStopWords(env, settings, HindiAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/HungarianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HungarianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/HungarianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HungarianAnalyzerProvider.java index f4c481701f688..55ade50a1c95a 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/HungarianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HungarianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.hu.HungarianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class HungarianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final HungarianAnalyzer analyzer; - public HungarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + HungarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new HungarianAnalyzer( Analysis.parseStopWords(env, settings, HungarianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/IndonesianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IndonesianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/IndonesianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IndonesianAnalyzerProvider.java index f5a31fb3f8911..583268165f243 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/IndonesianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IndonesianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.id.IndonesianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class IndonesianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final IndonesianAnalyzer analyzer; - public IndonesianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + IndonesianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new IndonesianAnalyzer( Analysis.parseStopWords(env, settings, IndonesianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/IrishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IrishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/IrishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IrishAnalyzerProvider.java index 9178f9e70d185..515121a3a6ae4 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/IrishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IrishAnalyzerProvider.java @@ -17,13 +17,15 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ga.IrishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; /** * Provider for {@link IrishAnalyzer} @@ -32,7 +34,7 @@ public class IrishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final ItalianAnalyzer analyzer; - public ItalianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + ItalianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new ItalianAnalyzer( Analysis.parseStopWords(env, settings, ItalianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/LatvianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LatvianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/LatvianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LatvianAnalyzerProvider.java index 39f4c02853e5c..d57f47923d1a1 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/LatvianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LatvianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.lv.LatvianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class LatvianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final LatvianAnalyzer analyzer; - public LatvianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + LatvianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new LatvianAnalyzer( Analysis.parseStopWords(env, settings, LatvianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/LithuanianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LithuanianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/LithuanianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LithuanianAnalyzerProvider.java index c8d25ce3342fe..b6db45d41744d 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/LithuanianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LithuanianAnalyzerProvider.java @@ -17,13 +17,15 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.lt.LithuanianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; /** * Provider for {@link LithuanianAnalyzer} @@ -32,7 +34,7 @@ public class LithuanianAnalyzerProvider extends AbstractIndexAnalyzerProvider

  • { private final NorwegianAnalyzer analyzer; - public NorwegianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + NorwegianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new NorwegianAnalyzer( Analysis.parseStopWords(env, settings, NorwegianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PersianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java similarity index 83% rename from server/src/main/java/org/elasticsearch/index/analysis/PersianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java index 298a6dacf48aa..2cca6bbe09b24 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PersianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java @@ -17,18 +17,20 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.fa.PersianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final PersianAnalyzer analyzer; - public PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())); analyzer.setVersion(version); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PortugueseAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PortugueseAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/PortugueseAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PortugueseAnalyzerProvider.java index cb0b0e6a20954..9bf27c8da763c 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PortugueseAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PortugueseAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.pt.PortugueseAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class PortugueseAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final PortugueseAnalyzer analyzer; - public PortugueseAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + PortugueseAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new PortugueseAnalyzer( Analysis.parseStopWords(env, settings, PortugueseAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/RomanianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/RomanianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java index d92663d65c03d..a71344a2bdb85 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/RomanianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ro.RomanianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final RomanianAnalyzer analyzer; - public RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new RomanianAnalyzer( Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/RussianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RussianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/RussianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RussianAnalyzerProvider.java index 9529f31a67b3d..4d015ae3578e6 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/RussianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RussianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ru.RussianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class RussianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final RussianAnalyzer analyzer; - public RussianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + RussianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new RussianAnalyzer( Analysis.parseStopWords(env, settings, RussianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SoraniAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SoraniAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/SoraniAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SoraniAnalyzerProvider.java index ea5ab7d885a73..1391e924bbc21 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SoraniAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SoraniAnalyzerProvider.java @@ -17,13 +17,15 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ckb.SoraniAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; /** * Provider for {@link SoraniAnalyzer} @@ -32,7 +34,7 @@ public class SoraniAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final SpanishAnalyzer analyzer; - public SpanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + SpanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new SpanishAnalyzer( Analysis.parseStopWords(env, settings, SpanishAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SwedishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SwedishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/SwedishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SwedishAnalyzerProvider.java index 6e868609fa93d..bf01e63e625e5 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SwedishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SwedishAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.sv.SwedishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class SwedishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final SwedishAnalyzer analyzer; - public SwedishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + SwedishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new SwedishAnalyzer( Analysis.parseStopWords(env, settings, SwedishAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ThaiAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiAnalyzerProvider.java similarity index 83% rename from server/src/main/java/org/elasticsearch/index/analysis/ThaiAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiAnalyzerProvider.java index e692e952c5d41..7f9543904d9fc 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ThaiAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiAnalyzerProvider.java @@ -17,18 +17,20 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class ThaiAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final ThaiAnalyzer analyzer; - public ThaiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + ThaiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new ThaiAnalyzer( Analysis.parseStopWords(env, settings, ThaiAnalyzer.getDefaultStopSet())); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/TurkishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/TurkishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/TurkishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/TurkishAnalyzerProvider.java index e3445c2c2fc58..ecf9d082e7017 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/TurkishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/TurkishAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class TurkishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final TurkishAnalyzer analyzer; - public TurkishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + TurkishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new TurkishAnalyzer( Analysis.parseStopWords(env, settings, TurkishAnalyzer.getDefaultStopSet()), diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml index fa0476c0f11aa..fa8f6eef8b924 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml @@ -592,3 +592,525 @@ analyzer: my_analyzer - length: { tokens: 1 } - match: { tokens.0.token: tisch } + +--- +"greek": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: greek + + - do: + indices.analyze: + body: + text: Μία + analyzer: greek + - length: { tokens: 1 } + - match: { tokens.0.token: μια } + + - do: + indices.analyze: + index: test + body: + text: Μία + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: μια } + +--- +"hindi": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: hindi + + - do: + indices.analyze: + body: + text: हिन्दी + analyzer: hindi + - length: { tokens: 1 } + - match: { tokens.0.token: हिंद } + + - do: + indices.analyze: + index: test + body: + text: हिन्दी + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: हिंद } + +--- +"hungarian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: hungarian + + - do: + indices.analyze: + body: + text: babakocsi + analyzer: hungarian + - length: { tokens: 1 } + - match: { tokens.0.token: babakocs } + + - do: + indices.analyze: + index: test + body: + text: babakocsi + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: babakocs } + +--- +"indonesian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: indonesian + + - do: + indices.analyze: + body: + text: peledakan + analyzer: indonesian + - length: { tokens: 1 } + - match: { tokens.0.token: ledak } + + - do: + indices.analyze: + index: test + body: + text: peledakan + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: ledak } + +--- +"irish": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: irish + + - do: + indices.analyze: + body: + text: siopadóireacht + analyzer: irish + - length: { tokens: 1 } + - match: { tokens.0.token: siopadóir } + + - do: + indices.analyze: + index: test + body: + text: siopadóireacht + analyzer: 
my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: siopadóir } + +--- +"italian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: italian + + - do: + indices.analyze: + body: + text: abbandonata + analyzer: italian + - length: { tokens: 1 } + - match: { tokens.0.token: abbandonat } + + - do: + indices.analyze: + index: test + body: + text: abbandonata + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: abbandonat } + +--- +"latvian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: latvian + + - do: + indices.analyze: + body: + text: tirgiem + analyzer: latvian + - length: { tokens: 1 } + - match: { tokens.0.token: tirg } + + - do: + indices.analyze: + index: test + body: + text: tirgiem + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: tirg } + +--- +"lithuanian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: lithuanian + + - do: + indices.analyze: + body: + text: vaikų + analyzer: lithuanian + - length: { tokens: 1 } + - match: { tokens.0.token: vaik } + + - do: + indices.analyze: + index: test + body: + text: vaikų + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: vaik } + +--- +"norwegian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: norwegian + + - do: + indices.analyze: + body: + text: havnedistriktene + analyzer: norwegian + - length: { tokens: 1 } + - match: { tokens.0.token: havnedistrikt } + + - do: + indices.analyze: + index: test + body: + text: havnedistriktene + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: havnedistrikt } + +--- +"persian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: persian + + - do: + indices.analyze: + body: + text: می‌خورد + analyzer: persian + - length: { tokens: 1 } + - match: { tokens.0.token: خورد } + + - do: + indices.analyze: + index: test + body: + text: می‌خورد + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: خورد } + +--- +"portuguese": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: portuguese + + - do: + indices.analyze: + body: + text: quilométricas + analyzer: portuguese + - length: { tokens: 1 } + - match: { tokens.0.token: quilometric } + + - do: + indices.analyze: + index: test + body: + text: quilométricas + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: quilometric } + +--- +"romanian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: romanian + + - do: + indices.analyze: + body: + text: absenţa + analyzer: romanian + - length: { tokens: 1 } + - match: { tokens.0.token: absenţ } + + - do: + indices.analyze: + index: test + body: + text: absenţa + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: absenţ } + +--- +"russian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: russian + + - do: + indices.analyze: + body: + text: Вместе с тем о + analyzer: russian + - length: { tokens: 1 } + - match: { tokens.0.token: вмест } + + - do: + indices.analyze: + index: test + body: + text: Вместе с тем о + analyzer: my_analyzer + - length: { tokens: 1 } + - match: 
{ tokens.0.token: вмест } + +--- +"sorani": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: sorani + + - do: + indices.analyze: + body: + text: پیاوە + analyzer: sorani + - length: { tokens: 1 } + - match: { tokens.0.token: پیاو } + + - do: + indices.analyze: + index: test + body: + text: پیاوە + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: پیاو } + +--- +"spanish": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: spanish + + - do: + indices.analyze: + body: + text: chicana + analyzer: spanish + - length: { tokens: 1 } + - match: { tokens.0.token: chican } + + - do: + indices.analyze: + index: test + body: + text: chicana + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: chican } + +--- +"swedish": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: swedish + + - do: + indices.analyze: + body: + text: jaktkarlarne + analyzer: swedish + - length: { tokens: 1 } + - match: { tokens.0.token: jaktkarl } + + - do: + indices.analyze: + index: test + body: + text: jaktkarlarne + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: jaktkarl } + +--- +"turkish": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: turkish + + - do: + indices.analyze: + body: + text: ağacı + analyzer: turkish + - length: { tokens: 1 } + - match: { tokens.0.token: ağaç } + + - do: + indices.analyze: + index: test + body: + text: ağacı + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: ağaç } + +--- +"thai": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: thai + + - do: + indices.analyze: + body: + text: ๑๒๓๔ + analyzer: thai + - length: { tokens: 1 } + - match: { tokens.0.token: "1234" } + + - do: + indices.analyze: + index: test + body: + text: ๑๒๓๔ + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: "1234" } diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 6b7860c0cf949..364732dc1833d 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -30,39 +30,21 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.GreekAnalyzerProvider; -import org.elasticsearch.index.analysis.HindiAnalyzerProvider; -import org.elasticsearch.index.analysis.HungarianAnalyzerProvider; import org.elasticsearch.index.analysis.HunspellTokenFilterFactory; -import org.elasticsearch.index.analysis.IndonesianAnalyzerProvider; -import org.elasticsearch.index.analysis.IrishAnalyzerProvider; -import org.elasticsearch.index.analysis.ItalianAnalyzerProvider; import org.elasticsearch.index.analysis.KeywordAnalyzerProvider; -import org.elasticsearch.index.analysis.LatvianAnalyzerProvider; -import org.elasticsearch.index.analysis.LithuanianAnalyzerProvider; -import org.elasticsearch.index.analysis.NorwegianAnalyzerProvider; -import org.elasticsearch.index.analysis.PersianAnalyzerProvider; -import 
org.elasticsearch.index.analysis.PortugueseAnalyzerProvider; import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory; import org.elasticsearch.index.analysis.PreConfiguredCharFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenizer; -import org.elasticsearch.index.analysis.RomanianAnalyzerProvider; -import org.elasticsearch.index.analysis.RussianAnalyzerProvider; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.analysis.SimpleAnalyzerProvider; -import org.elasticsearch.index.analysis.SoraniAnalyzerProvider; -import org.elasticsearch.index.analysis.SpanishAnalyzerProvider; import org.elasticsearch.index.analysis.StandardAnalyzerProvider; import org.elasticsearch.index.analysis.StandardTokenFilterFactory; import org.elasticsearch.index.analysis.StandardTokenizerFactory; import org.elasticsearch.index.analysis.StopAnalyzerProvider; import org.elasticsearch.index.analysis.StopTokenFilterFactory; -import org.elasticsearch.index.analysis.SwedishAnalyzerProvider; -import org.elasticsearch.index.analysis.ThaiAnalyzerProvider; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.index.analysis.TurkishAnalyzerProvider; import org.elasticsearch.index.analysis.WhitespaceAnalyzerProvider; import org.elasticsearch.plugins.AnalysisPlugin; @@ -227,24 +209,6 @@ private NamedRegistry>> setupAnalyzers(List analyzers.register("stop", StopAnalyzerProvider::new); analyzers.register("whitespace", WhitespaceAnalyzerProvider::new); analyzers.register("keyword", KeywordAnalyzerProvider::new); - analyzers.register("greek", GreekAnalyzerProvider::new); - analyzers.register("hindi", HindiAnalyzerProvider::new); - analyzers.register("hungarian", HungarianAnalyzerProvider::new); - analyzers.register("indonesian", IndonesianAnalyzerProvider::new); - analyzers.register("irish", IrishAnalyzerProvider::new); - analyzers.register("italian", ItalianAnalyzerProvider::new); - analyzers.register("latvian", LatvianAnalyzerProvider::new); - analyzers.register("lithuanian", LithuanianAnalyzerProvider::new); - analyzers.register("norwegian", NorwegianAnalyzerProvider::new); - analyzers.register("persian", PersianAnalyzerProvider::new); - analyzers.register("portuguese", PortugueseAnalyzerProvider::new); - analyzers.register("romanian", RomanianAnalyzerProvider::new); - analyzers.register("russian", RussianAnalyzerProvider::new); - analyzers.register("sorani", SoraniAnalyzerProvider::new); - analyzers.register("spanish", SpanishAnalyzerProvider::new); - analyzers.register("swedish", SwedishAnalyzerProvider::new); - analyzers.register("turkish", TurkishAnalyzerProvider::new); - analyzers.register("thai", ThaiAnalyzerProvider::new); analyzers.extractAndRegister(plugins, AnalysisPlugin::getAnalyzers); return analyzers; } diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java index 0e9aed3c142d9..0f31a8a46f1db 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java @@ -20,30 +20,12 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharArraySet; -import org.apache.lucene.analysis.ckb.SoraniAnalyzer; import 
org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.core.SimpleAnalyzer; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.analysis.el.GreekAnalyzer; -import org.apache.lucene.analysis.es.SpanishAnalyzer; -import org.apache.lucene.analysis.fa.PersianAnalyzer; -import org.apache.lucene.analysis.ga.IrishAnalyzer; -import org.apache.lucene.analysis.hi.HindiAnalyzer; -import org.apache.lucene.analysis.hu.HungarianAnalyzer; -import org.apache.lucene.analysis.id.IndonesianAnalyzer; -import org.apache.lucene.analysis.it.ItalianAnalyzer; -import org.apache.lucene.analysis.lt.LithuanianAnalyzer; -import org.apache.lucene.analysis.lv.LatvianAnalyzer; -import org.apache.lucene.analysis.no.NorwegianAnalyzer; -import org.apache.lucene.analysis.pt.PortugueseAnalyzer; -import org.apache.lucene.analysis.ro.RomanianAnalyzer; -import org.apache.lucene.analysis.ru.RussianAnalyzer; import org.apache.lucene.analysis.standard.ClassicAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.analysis.sv.SwedishAnalyzer; -import org.apache.lucene.analysis.th.ThaiAnalyzer; -import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.elasticsearch.Version; import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; @@ -110,168 +92,6 @@ protected Analyzer create(Version version) { a.setVersion(version.luceneVersion); return a; } - }, - - GREEK { - @Override - protected Analyzer create(Version version) { - Analyzer a = new GreekAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - HINDI { - @Override - protected Analyzer create(Version version) { - Analyzer a = new HindiAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - HUNGARIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new HungarianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - INDONESIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new IndonesianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - IRISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new IrishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - ITALIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new ItalianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - LATVIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new LatvianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - LITHUANIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new LithuanianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - NORWEGIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new NorwegianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - PERSIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new PersianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - PORTUGUESE { - @Override - protected Analyzer create(Version version) { - Analyzer a = new PortugueseAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - ROMANIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new RomanianAnalyzer(); - 
a.setVersion(version.luceneVersion); - return a; - } - }, - - RUSSIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new RussianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - SORANI { - @Override - protected Analyzer create(Version version) { - Analyzer a = new SoraniAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - SPANISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new SpanishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - SWEDISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new SwedishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - TURKISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new TurkishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - THAI { - @Override - protected Analyzer create(Version version) { - Analyzer a = new ThaiAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } }; protected abstract Analyzer create(Version version); diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index a31dcc81f722e..47f30e10ef912 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -165,7 +165,7 @@ public void testVersionedAnalyzers() throws Exception { assertEquals(Version.V_5_0_0.luceneVersion, indexAnalyzers.get("standard").analyzer().getVersion()); assertEquals(Version.V_5_0_0.luceneVersion, - indexAnalyzers.get("thai").analyzer().getVersion()); + indexAnalyzers.get("stop").analyzer().getVersion()); assertThat(indexAnalyzers.get("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class))); assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), From c4f8df3ad6b8657557a7efb9399e70927e1f0340 Mon Sep 17 00:00:00 2001 From: Sohaib Iftikhar Date: Mon, 18 Jun 2018 15:59:29 +0200 Subject: [PATCH 14/92] REST high-level client: add validate query API (#31077) Adds the validate query API to the high level rest client. 
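For reference, a minimal usage sketch of the API this commit adds, mirroring the documentation test included later in the patch. It assumes an already-built `RestHighLevelClient client`, the usual imports, and a placeholder index name `some_index`; it is a sketch, not the canonical snippet:

["source","java"]
--------------------------------------------------
// Sketch only: "client" is an existing RestHighLevelClient, "some_index" a placeholder index.
ValidateQueryRequest request = new ValidateQueryRequest("some_index");
request.query(QueryBuilders.boolQuery()
        .must(QueryBuilders.queryStringQuery("*:*"))
        .filter(QueryBuilders.termQuery("user", "kimchy")));
request.explain(true); // ask the shards to explain why the query is (in)valid

// Blocking variant; validateQueryAsync(request, options, listener) is the async counterpart.
ValidateQueryResponse response =
        client.indices().validateQuery(request, RequestOptions.DEFAULT);
boolean valid = response.isValid(); // false if the query could not be parsed or executed
--------------------------------------------------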
--- .../elasticsearch/client/IndicesClient.java | 32 +++++ .../client/RequestConverters.java | 15 +++ .../elasticsearch/client/IndicesClientIT.java | 39 ++++++ .../client/RequestConvertersTests.java | 35 ++++++ .../IndicesClientDocumentationIT.java | 83 +++++++++++++ .../indices/validate_query.asciidoc | 113 ++++++++++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../validate/query/QueryExplanation.java | 82 ++++++++++++- .../validate/query/ValidateQueryRequest.java | 12 +- .../validate/query/ValidateQueryResponse.java | 53 +++++--- .../indices/RestValidateQueryAction.java | 3 +- .../validate/query/QueryExplanationTests.java | 59 +++++++++ .../query/ValidateQueryResponseTests.java | 110 +++++++++++++++++ 13 files changed, 618 insertions(+), 20 deletions(-) create mode 100644 docs/java-rest/high-level/indices/validate_query.asciidoc create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanationTests.java create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 5f85b18091d72..30a42eb333f4a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -57,6 +57,8 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -661,6 +663,36 @@ public void putTemplateAsync(PutIndexTemplateRequest putIndexTemplateRequest, Re PutIndexTemplateResponse::fromXContent, listener, emptySet()); } + /** + * Validate a potentially expensive query without executing it. + *

    + * See Validate Query API + * on elastic.co + * @param validateQueryRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public ValidateQueryResponse validateQuery(ValidateQueryRequest validateQueryRequest, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(validateQueryRequest, RequestConverters::validateQuery, options, + ValidateQueryResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously validate a potentially expensive query without executing it. + *

    + * See Validate Query API + * on elastic.co + * @param validateQueryRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void validateQueryAsync(ValidateQueryRequest validateQueryRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(validateQueryRequest, RequestConverters::validateQuery, options, + ValidateQueryResponse::fromXContent, listener, emptySet()); + } + /** * Gets index templates using the Index Templates API * See Index Templates API diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 93bf6a1a19881..ab85af9f1fd7e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -58,6 +58,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; @@ -856,6 +857,20 @@ static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) thro return request; } + static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException { + String[] indices = validateQueryRequest.indices() == null ? Strings.EMPTY_ARRAY : validateQueryRequest.indices(); + String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types(); + String endpoint = endpoint(indices, types, "_validate/query"); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + Params params = new Params(request); + params.withIndicesOptions(validateQueryRequest.indicesOptions()); + params.putParam("explain", Boolean.toString(validateQueryRequest.explain())); + params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards())); + params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite())); + request.setEntity(createEntity(validateQueryRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request getAlias(GetAliasesRequest getAliasesRequest) { String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); String[] aliases = getAliasesRequest.aliases() == null ? 
Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 82ac161f5afe0..c226b5349267c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; @@ -63,6 +64,8 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; @@ -80,6 +83,8 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -1155,6 +1160,40 @@ public void testPutTemplateBadRequests() throws Exception { assertThat(unknownSettingError.getDetailedMessage(), containsString("unknown setting [index.this-setting-does-not-exist]")); } + public void testValidateQuery() throws IOException{ + String index = "some_index"; + createIndex(index, Settings.EMPTY); + QueryBuilder builder = QueryBuilders + .boolQuery() + .must(QueryBuilders.queryStringQuery("*:*")) + .filter(QueryBuilders.termQuery("user", "kimchy")); + ValidateQueryRequest request = new ValidateQueryRequest(index).query(builder); + request.explain(randomBoolean()); + ValidateQueryResponse response = execute(request, highLevelClient().indices()::validateQuery, + highLevelClient().indices()::validateQueryAsync); + assertTrue(response.isValid()); + } + + public void testInvalidValidateQuery() throws IOException{ + String index = "shakespeare"; + + createIndex(index, Settings.EMPTY); + Request postDoc = new Request(HttpPost.METHOD_NAME, "/" + index + "/1"); + postDoc.setJsonEntity( + "{\"type\":\"act\",\"line_id\":1,\"play_name\":\"Henry IV\", \"speech_number\":\"\"," + + "\"line_number\":\"\",\"speaker\":\"\",\"text_entry\":\"ACT I\"}"); + assertOK(client().performRequest(postDoc)); + + QueryBuilder builder = QueryBuilders + .queryStringQuery("line_id:foo") + .lenient(false); + ValidateQueryRequest request = new ValidateQueryRequest(index).query(builder); + request.explain(true); + ValidateQueryResponse response = execute(request, highLevelClient().indices()::validateQuery, + highLevelClient().indices()::validateQueryAsync); + assertFalse(response.isValid()); + } + public void testGetIndexTemplate() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index aa8221f30991e..60f427b490462 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -60,6 +60,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.delete.DeleteRequest; @@ -1895,6 +1896,40 @@ public void testPutTemplateRequest() throws Exception { assertToXContentBody(putTemplateRequest, request.getEntity()); } + public void testValidateQuery() throws Exception { + String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5); + String[] types = randomBoolean() ? generateRandomStringArray(5, 5, false, false) : null; + ValidateQueryRequest validateQueryRequest; + if (randomBoolean()) { + validateQueryRequest = new ValidateQueryRequest(indices); + } else { + validateQueryRequest = new ValidateQueryRequest(); + validateQueryRequest.indices(indices); + } + validateQueryRequest.types(types); + Map expectedParams = new HashMap<>(); + setRandomIndicesOptions(validateQueryRequest::indicesOptions, validateQueryRequest::indicesOptions, expectedParams); + validateQueryRequest.explain(randomBoolean()); + validateQueryRequest.rewrite(randomBoolean()); + validateQueryRequest.allShards(randomBoolean()); + expectedParams.put("explain", Boolean.toString(validateQueryRequest.explain())); + expectedParams.put("rewrite", Boolean.toString(validateQueryRequest.rewrite())); + expectedParams.put("all_shards", Boolean.toString(validateQueryRequest.allShards())); + Request request = RequestConverters.validateQuery(validateQueryRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + if (indices != null && indices.length > 0) { + endpoint.add(String.join(",", indices)); + if (types != null && types.length > 0) { + endpoint.add(String.join(",", types)); + } + } + endpoint.add("_validate/query"); + assertThat(request.getEndpoint(), equalTo(endpoint.toString())); + assertThat(request.getParameters(), equalTo(expectedParams)); + assertToXContentBody(validateQueryRequest, request.getEntity()); + assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); + } + public void testGetTemplateRequest() throws Exception { Map encodes = new HashMap<>(); encodes.put("log", "log"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 7bd6b16cecc99..9cc28152d03e3 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -62,6 +62,9 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; +import 
org.elasticsearch.action.admin.indices.validate.query.QueryExplanation; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.IndicesOptions; @@ -81,6 +84,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; @@ -2128,4 +2132,83 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + + public void testValidateQuery() throws IOException, InterruptedException { + RestHighLevelClient client = highLevelClient(); + + String index = "some_index"; + createIndex(index, Settings.EMPTY); + + // tag::validate-query-request + ValidateQueryRequest request = new ValidateQueryRequest(index); // <1> + // end::validate-query-request + + // tag::validate-query-request-query + QueryBuilder builder = QueryBuilders + .boolQuery() // <1> + .must(QueryBuilders.queryStringQuery("*:*")) + .filter(QueryBuilders.termQuery("user", "kimchy")); + request.query(builder); // <2> + // end::validate-query-request-query + + // tag::validate-query-request-explain + request.explain(true); // <1> + // end::validate-query-request-explain + + // tag::validate-query-request-allShards + request.allShards(true); // <1> + // end::validate-query-request-allShards + + // tag::validate-query-request-rewrite + request.rewrite(true); // <1> + // end::validate-query-request-rewrite + + // tag::validate-query-execute + ValidateQueryResponse response = client.indices().validateQuery(request, RequestOptions.DEFAULT); // <1> + // end::validate-query-execute + + // tag::validate-query-response + boolean isValid = response.isValid(); // <1> + int totalShards = response.getTotalShards(); // <2> + int successfulShards = response.getSuccessfulShards(); // <3> + int failedShards = response.getFailedShards(); // <4> + if (failedShards > 0) { + for(DefaultShardOperationFailedException failure: response.getShardFailures()) { // <5> + String failedIndex = failure.index(); // <6> + int shardId = failure.shardId(); // <7> + String reason = failure.reason(); // <8> + } + } + for(QueryExplanation explanation: response.getQueryExplanation()) { // <9> + String explanationIndex = explanation.getIndex(); // <10> + int shardId = explanation.getShard(); // <11> + String explanationString = explanation.getExplanation(); // <12> + } + // end::validate-query-response + + // tag::validate-query-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(ValidateQueryResponse validateQueryResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::validate-query-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::validate-query-execute-async + client.indices().validateQueryAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::validate-query-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } } diff --git 
a/docs/java-rest/high-level/indices/validate_query.asciidoc b/docs/java-rest/high-level/indices/validate_query.asciidoc new file mode 100644 index 0000000000000..3b3b184b02875 --- /dev/null +++ b/docs/java-rest/high-level/indices/validate_query.asciidoc @@ -0,0 +1,113 @@ +[[java-rest-high-indices-validate-query]] +=== Validate Query API + +[[java-rest-high-indices-validate-query-request]] +==== Validate Query Request + +A `ValidateQueryRequest` requires one or more `indices` on which the query is validated. If no index +is provided the request is executed on all indices. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request] +-------------------------------------------------- +<1> The index on which to run the request. + +In addition it also needs the query that needs to be validated. The query can be built using the `QueryBuilders` utility class. +The following code snippet builds a sample boolean query. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-query] +-------------------------------------------------- +<1> Build the desired query. +<2> Set it to the request. + +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-explain] +-------------------------------------------------- +<1> The explain parameter can be set to true to get more detailed information about why a query failed + +By default, the request is executed on a single shard only, which is randomly selected. The detailed explanation of +the query may depend on which shard is being hit, and therefore may vary from one request to another. So, in case of +query rewrite the `allShards` parameter should be used to get response from all available shards. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-allShards] +-------------------------------------------------- +<1> Set the allShards parameter. + +When the query is valid, the explanation defaults to the string representation of that query. With rewrite set to true, +the explanation is more detailed showing the actual Lucene query that will be executed + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-rewrite] +-------------------------------------------------- +<1> Set the rewrite parameter. + +[[java-rest-high-indices-validate-query-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-execute] +-------------------------------------------------- +<1> Execute the request and get back the response in a ValidateQueryResponse object. 
+ +[[java-rest-high-indices-validate-query-async]] +==== Asynchronous Execution + +The asynchronous execution of a validate query request requires both the `ValidateQueryRequest` +instance and an `ActionListener` instance to be passed to the asynchronous +method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-execute-async] +-------------------------------------------------- +<1> The `ValidateQueryRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `ValidateQueryResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument + +[[java-rest-high-indices-validate-query-response]] +==== Validate Query Response + +The returned `ValidateQueryResponse` allows to retrieve information about the executed + operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-response] +-------------------------------------------------- +<1> Check if the query is valid or not. +<2> Get total number of shards. +<3> Get number of shards that were successful. +<4> Get number of shards that failed. +<5> Get the shard failures as `DefaultShardOperationFailedException`. +<6> Get the index of a failed shard. +<7> Get the shard id of a failed shard. +<8> Get the reason for shard failure. +<9> Get the detailed explanation for the shards (if explain was set to `true`). +<10> Get the index to which a particular explanation belongs. +<11> Get the shard id to which a particular explanation belongs. +<12> Get the actual explanation string. 
\ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index b33c2421b06d3..4cd87a521d104 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -73,6 +73,7 @@ Index Management:: * <> * <> * <> +* <> Mapping Management:: * <> @@ -103,6 +104,7 @@ include::indices/get_alias.asciidoc[] include::indices/put_settings.asciidoc[] include::indices/get_settings.asciidoc[] include::indices/put_template.asciidoc[] +include::indices/validate_query.asciidoc[] include::indices/get_templates.asciidoc[] == Cluster APIs diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java index 780bf037f0e28..e330a0b8565fc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java @@ -20,16 +20,57 @@ package org.elasticsearch.action.admin.indices.validate.query; import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Objects; -public class QueryExplanation implements Streamable { +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class QueryExplanation implements Streamable, ToXContentFragment { + + public static final String INDEX_FIELD = "index"; + public static final String SHARD_FIELD = "shard"; + public static final String VALID_FIELD = "valid"; + public static final String ERROR_FIELD = "error"; + public static final String EXPLANATION_FIELD = "explanation"; public static final int RANDOM_SHARD = -1; + @SuppressWarnings("unchecked") + static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "query_explanation", + true, + a -> { + int shard = RANDOM_SHARD; + if (a[1] != null) { + shard = (int)a[1]; + } + return new QueryExplanation( + (String)a[0], + shard, + (boolean)a[2], + (String)a[3], + (String)a[4] + ); + } + ); + static { + PARSER.declareString(optionalConstructorArg(), new ParseField(INDEX_FIELD)); + PARSER.declareInt(optionalConstructorArg(), new ParseField(SHARD_FIELD)); + PARSER.declareBoolean(constructorArg(), new ParseField(VALID_FIELD)); + PARSER.declareString(optionalConstructorArg(), new ParseField(EXPLANATION_FIELD)); + PARSER.declareString(optionalConstructorArg(), new ParseField(ERROR_FIELD)); + } + private String index; private int shard = RANDOM_SHARD; @@ -110,4 +151,43 @@ public static QueryExplanation readQueryExplanation(StreamInput in) throws IOEx exp.readFrom(in); return exp; } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (getIndex() != null) { + builder.field(INDEX_FIELD, getIndex()); + } + if(getShard() >= 0) { + 
builder.field(SHARD_FIELD, getShard()); + } + builder.field(VALID_FIELD, isValid()); + if (getError() != null) { + builder.field(ERROR_FIELD, getError()); + } + if (getExplanation() != null) { + builder.field(EXPLANATION_FIELD, getExplanation()); + } + return builder; + } + + public static QueryExplanation fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryExplanation other = (QueryExplanation) o; + return Objects.equals(getIndex(), other.getIndex()) && + Objects.equals(getShard(), other.getShard()) && + Objects.equals(isValid(), other.isValid()) && + Objects.equals(getError(), other.getError()) && + Objects.equals(getExplanation(), other.getExplanation()); + } + + @Override + public int hashCode() { + return Objects.hash(getIndex(), getShard(), isValid(), getError(), getExplanation()); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java index 5953a5548c465..7694e7583c898 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -27,6 +27,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -38,7 +40,7 @@ *

    * The request requires the query to be set using {@link #query(QueryBuilder)} */ -public class ValidateQueryRequest extends BroadcastRequest { +public class ValidateQueryRequest extends BroadcastRequest implements ToXContentObject { private QueryBuilder query = new MatchAllQueryBuilder(); @@ -179,4 +181,12 @@ public String toString() { return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", query[" + query + "], explain:" + explain + ", rewrite:" + rewrite + ", all_shards:" + allShards; } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("query"); + query.toXContent(builder, params); + return builder.endObject(); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java index 5bb11dd56e00b..f766e1d9c6aa4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java @@ -21,16 +21,22 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import static org.elasticsearch.action.admin.indices.validate.query.QueryExplanation.readQueryExplanation; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * The response of the validate action. 
@@ -39,12 +45,33 @@ */ public class ValidateQueryResponse extends BroadcastResponse { - public static final String INDEX_FIELD = "index"; - public static final String SHARD_FIELD = "shard"; public static final String VALID_FIELD = "valid"; public static final String EXPLANATIONS_FIELD = "explanations"; - public static final String ERROR_FIELD = "error"; - public static final String EXPLANATION_FIELD = "explanation"; + + @SuppressWarnings("unchecked") + static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "validate_query", + true, + arg -> { + BroadcastResponse response = (BroadcastResponse) arg[0]; + return + new ValidateQueryResponse( + (boolean)arg[1], + (List)arg[2], + response.getTotalShards(), + response.getSuccessfulShards(), + response.getFailedShards(), + Arrays.asList(response.getShardFailures()) + ); + } + ); + static { + declareBroadcastFields(PARSER); + PARSER.declareBoolean(constructorArg(), new ParseField(VALID_FIELD)); + PARSER.declareObjectArray( + optionalConstructorArg(), QueryExplanation.PARSER, new ParseField(EXPLANATIONS_FIELD) + ); + } private boolean valid; @@ -112,22 +139,14 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.startArray(EXPLANATIONS_FIELD); for (QueryExplanation explanation : getQueryExplanation()) { builder.startObject(); - if (explanation.getIndex() != null) { - builder.field(INDEX_FIELD, explanation.getIndex()); - } - if(explanation.getShard() >= 0) { - builder.field(SHARD_FIELD, explanation.getShard()); - } - builder.field(VALID_FIELD, explanation.isValid()); - if (explanation.getError() != null) { - builder.field(ERROR_FIELD, explanation.getError()); - } - if (explanation.getExplanation() != null) { - builder.field(EXPLANATION_FIELD, explanation.getExplanation()); - } + explanation.toXContent(builder, params); builder.endObject(); } builder.endArray(); } } + + public static ValidateQueryResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java index 57486396f911b..d1a97d74d047f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.rest.action.admin.indices; +import org.elasticsearch.action.admin.indices.validate.query.QueryExplanation; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.support.IndicesOptions; @@ -101,7 +102,7 @@ private static BytesRestResponse buildErrorResponse(XContentBuilder builder, Str builder.startObject(); builder.field(ValidateQueryResponse.VALID_FIELD, false); if (explain) { - builder.field(ValidateQueryResponse.ERROR_FIELD, error); + builder.field(QueryExplanation.ERROR_FIELD, error); } builder.endObject(); return new BytesRestResponse(OK, builder); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanationTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanationTests.java new file mode 100644 index 0000000000000..db167e0c7669e --- /dev/null +++ 
b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanationTests.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.validate.query; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; + +public class QueryExplanationTests extends AbstractStreamableXContentTestCase { + + static QueryExplanation createRandomQueryExplanation(boolean isValid) { + String index = "index_" + randomInt(1000); + int shard = randomInt(100); + Boolean valid = isValid; + String errorField = null; + if (!valid) { + errorField = randomAlphaOfLength(randomIntBetween(10, 100)); + } + String explanation = randomAlphaOfLength(randomIntBetween(10, 100)); + return new QueryExplanation(index, shard, valid, explanation, errorField); + } + + static QueryExplanation createRandomQueryExplanation() { + return createRandomQueryExplanation(randomBoolean()); + } + + @Override + protected QueryExplanation doParseInstance(XContentParser parser) throws IOException { + return QueryExplanation.fromXContent(parser); + } + + @Override + protected QueryExplanation createBlankInstance() { + return new QueryExplanation(); + } + + @Override + protected QueryExplanation createTestInstance() { + return createRandomQueryExplanation(); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java new file mode 100644 index 0000000000000..d72aae8fa2bd1 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.validate.query; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.AbstractBroadcastResponseTestCase; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class ValidateQueryResponseTests extends AbstractBroadcastResponseTestCase { + + private static ValidateQueryResponse createRandomValidateQueryResponse( + int totalShards, int successfulShards, int failedShards, List failures) { + boolean valid = failedShards == 0; + List queryExplanations = new ArrayList<>(totalShards); + for(DefaultShardOperationFailedException failure: failures) { + queryExplanations.add( + new QueryExplanation( + failure.index(), failure.shardId(), false, failure.reason(), null + ) + ); + } + return new ValidateQueryResponse( + valid, queryExplanations, totalShards, successfulShards, failedShards, failures + ); + } + + private static ValidateQueryResponse createRandomValidateQueryResponse() { + int totalShards = randomIntBetween(1, 10); + int successfulShards = randomIntBetween(0, totalShards); + int failedShards = totalShards - successfulShards; + boolean valid = failedShards == 0; + List queryExplanations = new ArrayList<>(totalShards); + List shardFailures = new ArrayList<>(failedShards); + for (int i=0; i queryExplSet = new HashSet<>(response.getQueryExplanation()); + assertEquals(response.isValid(), parsedResponse.isValid()); + assertEquals(response.getQueryExplanation().size(), parsedResponse.getQueryExplanation().size()); + assertTrue(queryExplSet.containsAll(parsedResponse.getQueryExplanation())); + } + + @Override + protected ValidateQueryResponse createTestInstance(int totalShards, int successfulShards, int failedShards, + List failures) { + return createRandomValidateQueryResponse(totalShards, successfulShards, failedShards, failures); + } + + @Override + public void testToXContent() { + ValidateQueryResponse response = createTestInstance(10, 10, 0, new ArrayList<>()); + String output = Strings.toString(response); + assertEquals("{\"_shards\":{\"total\":10,\"successful\":10,\"failed\":0},\"valid\":true}", output); + } +} From 1502812c1a4b7dd08417a45798b8fe66311e7028 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Mon, 18 Jun 2018 10:01:28 -0400 Subject: [PATCH 15/92] Percentile/Ranks should return null instead of NaN when empty (#30460) The other metric aggregations (min/max/etc) return `null` as their XContent value and string when nothing was computed (due to empty/missing fields). Percentiles and Percentile Ranks, however, return `NaN `which is inconsistent and confusing for the user. This fixes the inconsistency by making the aggs return `null`. This applies to both the numeric value and the "as string" value. Note: like the metric aggs, this does not change the value if fetched directly from the percentiles object, which will return as `NaN`/`"NaN"`. This only changes the XContent output. While this is a bugfix, it still breaks bwc in a minor way as the response changes from prior version. 
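For illustration, the keyed output for an empty percentiles/ranks aggregation (percents 1, 2, 3) now serializes as sketched below; this mirrors the expected string asserted in the new tests, and `ranks` stands for any percentile-ranks result computed over no documents (an assumption for the sketch):

["source","java"]
--------------------------------------------------
// REST/XContent output over an empty or missing field is now:
//   { "values": { "1.0": null, "2.0": null, "3.0": null } }
// rather than NaN. Reading values straight off the Java object is unchanged:
double percent = ranks.percent(1.0);                  // still Double.NaN
String percentAsString = ranks.percentAsString(1.0);  // still "NaN"
--------------------------------------------------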
Closes #29066 --- .../release-notes/7.0.0-alpha1.asciidoc | 6 ++ .../elasticsearch/search/DocValueFormat.java | 16 +++++ .../percentiles/ParsedPercentiles.java | 11 ++-- .../hdr/AbstractInternalHDRPercentiles.java | 10 ++-- .../AbstractInternalTDigestPercentiles.java | 10 ++-- .../AbstractPercentilesTestCase.java | 58 ++++++++++++++++++- .../InternalPercentilesRanksTestCase.java | 8 +++ .../InternalPercentilesTestCase.java | 8 +++ .../hdr/InternalHDRPercentilesRanksTests.java | 1 + 9 files changed, 112 insertions(+), 16 deletions(-) diff --git a/docs/reference/release-notes/7.0.0-alpha1.asciidoc b/docs/reference/release-notes/7.0.0-alpha1.asciidoc index 1cc328f16598b..cf2e1e30be050 100644 --- a/docs/reference/release-notes/7.0.0-alpha1.asciidoc +++ b/docs/reference/release-notes/7.0.0-alpha1.asciidoc @@ -16,3 +16,9 @@ Cross-Cluster-Search:: Rest API:: * The Clear Cache API only supports `POST` as HTTP method + +Aggregations:: +* The Percentiles and PercentileRanks aggregations now return `null` in the REST response, + instead of `NaN`. This makes it consistent with the rest of the aggregations. Note: + this only applies to the REST response, the java objects continue to return `NaN` (also + consistent with other aggregations) \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index 242e088747341..3a3b1c680aba1 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -394,6 +394,22 @@ public String format(long value) { @Override public String format(double value) { + /** + * Explicitly check for NaN, since it formats to "�" or "NaN" depending on JDK version. + * + * Decimal formatter uses the JRE's default symbol list (via Locale.ROOT above). In JDK8, + * this translates into using {@link sun.util.locale.provider.JRELocaleProviderAdapter}, which loads + * {@link sun.text.resources.FormatData} for symbols. There, `NaN` is defined as `\ufffd` (�) + * + * In JDK9+, {@link sun.util.cldr.CLDRLocaleProviderAdapter} is used instead, which loads + * {@link sun.text.resources.cldr.FormatData}. There, `NaN` is defined as `"NaN"` + * + * Since the character � isn't very useful, and makes the output change depending on JDK version, + * we manually check to see if the value is NaN and return the string directly. + */ + if (Double.isNaN(value)) { + return String.valueOf(Double.NaN); + } return format.format(value); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java index 3f56b21dcd8a0..2c7da76446d5a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java @@ -92,9 +92,9 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) builder.startObject(CommonFields.VALUES.getPreferredName()); for (Map.Entry percentile : percentiles.entrySet()) { Double key = percentile.getKey(); - builder.field(String.valueOf(key), percentile.getValue()); - - if (valuesAsString) { + Double value = percentile.getValue(); + builder.field(String.valueOf(key), value.isNaN() ? 
null : value); + if (valuesAsString && value.isNaN() == false) { builder.field(key + "_as_string", getPercentileAsString(key)); } } @@ -106,8 +106,9 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) builder.startObject(); { builder.field(CommonFields.KEY.getPreferredName(), key); - builder.field(CommonFields.VALUE.getPreferredName(), percentile.getValue()); - if (valuesAsString) { + Double value = percentile.getValue(); + builder.field(CommonFields.VALUE.getPreferredName(), value.isNaN() ? null : value); + if (valuesAsString && value.isNaN() == false) { builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), getPercentileAsString(key)); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java index 48d35de6cb6ab..a7b359d59373c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java @@ -123,9 +123,9 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th for(int i = 0; i < keys.length; ++i) { String key = String.valueOf(keys[i]); double value = value(keys[i]); - builder.field(key, value); - if (format != DocValueFormat.RAW) { - builder.field(key + "_as_string", format.format(value)); + builder.field(key, state.getTotalCount() == 0 ? null : value); + if (format != DocValueFormat.RAW && state.getTotalCount() > 0) { + builder.field(key + "_as_string", format.format(value).toString()); } } builder.endObject(); @@ -135,8 +135,8 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th double value = value(keys[i]); builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), keys[i]); - builder.field(CommonFields.VALUE.getPreferredName(), value); - if (format != DocValueFormat.RAW) { + builder.field(CommonFields.VALUE.getPreferredName(), state.getTotalCount() == 0 ? null : value); + if (format != DocValueFormat.RAW && state.getTotalCount() > 0) { builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString()); } builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java index 3806d7feb9550..0938710406a7b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java @@ -106,9 +106,9 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th for(int i = 0; i < keys.length; ++i) { String key = String.valueOf(keys[i]); double value = value(keys[i]); - builder.field(key, value); - if (format != DocValueFormat.RAW) { - builder.field(key + "_as_string", format.format(value)); + builder.field(key, state.size() == 0 ? 
null : value); + if (format != DocValueFormat.RAW && state.size() > 0) { + builder.field(key + "_as_string", format.format(value).toString()); } } builder.endObject(); @@ -118,8 +118,8 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th double value = value(keys[i]); builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), keys[i]); - builder.field(CommonFields.VALUE.getPreferredName(), value); - if (format != DocValueFormat.RAW) { + builder.field(CommonFields.VALUE.getPreferredName(), state.size() == 0 ? null : value); + if (format != DocValueFormat.RAW && state.size() > 0) { builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString()); } builder.endObject(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java index e54a2a8b9a14f..c4a3d3b2ffcef 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java @@ -19,6 +19,10 @@ package org.elasticsearch.search.aggregations.metrics.percentiles; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregation.CommonFields; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -27,11 +31,14 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.function.Predicate; +import static org.hamcrest.Matchers.equalTo; + public abstract class AbstractPercentilesTestCase> extends InternalAggregationTestCase { @@ -49,7 +56,7 @@ public void setUp() throws Exception { @Override protected T createTestInstance(String name, List pipelineAggregators, Map metaData) { - int numValues = randomInt(100); + int numValues = frequently() ? 
randomInt(100) : 0; double[] values = new double[numValues]; for (int i = 0; i < numValues; ++i) { values[i] = randomDouble(); @@ -89,4 +96,53 @@ public static double[] randomPercents(boolean sorted) { protected Predicate excludePathsFromXContentInsertion() { return path -> path.endsWith(CommonFields.VALUES.getPreferredName()); } + + protected abstract void assertPercentile(T agg, Double value); + + public void testEmptyRanksXContent() throws IOException { + double[] percents = new double[]{1,2,3}; + boolean keyed = randomBoolean(); + DocValueFormat docValueFormat = randomNumericDocValueFormat(); + + T agg = createTestInstance("test", Collections.emptyList(), Collections.emptyMap(), keyed, docValueFormat, percents, new double[0]); + + for (Percentile percentile : agg) { + Double value = percentile.getValue(); + assertPercentile(agg, value); + } + + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); + builder.startObject(); + agg.doXContentBody(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + String expected; + if (keyed) { + expected = "{\n" + + " \"values\" : {\n" + + " \"1.0\" : null,\n" + + " \"2.0\" : null,\n" + + " \"3.0\" : null\n" + + " }\n" + + "}"; + } else { + expected = "{\n" + + " \"values\" : [\n" + + " {\n" + + " \"key\" : 1.0,\n" + + " \"value\" : null\n" + + " },\n" + + " {\n" + + " \"key\" : 2.0,\n" + + " \"value\" : null\n" + + " },\n" + + " {\n" + + " \"key\" : 3.0,\n" + + " \"value\" : null\n" + + " }\n" + + " ]\n" + + "}"; + } + + assertThat(Strings.toString(builder), equalTo(expected)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java index f45b7cce51e37..a63fd42da7d96 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java @@ -22,6 +22,8 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.ParsedAggregation; +import static org.hamcrest.Matchers.equalTo; + public abstract class InternalPercentilesRanksTestCase extends AbstractPercentilesTestCase { @@ -39,4 +41,10 @@ protected final void assertFromXContent(T aggregation, ParsedAggregation parsedA Class parsedClass = implementationClass(); assertTrue(parsedClass != null && parsedClass.isInstance(parsedAggregation)); } + + @Override + protected void assertPercentile(T agg, Double value) { + assertThat(agg.percent(value), equalTo(Double.NaN)); + assertThat(agg.percentAsString(value), equalTo("NaN")); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java index be105f2af80b6..1024577a6b6ed 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java @@ -24,6 +24,8 @@ import java.util.List; +import static org.hamcrest.Matchers.equalTo; + public abstract class InternalPercentilesTestCase extends AbstractPercentilesTestCase { @Override @@ -49,4 +51,10 @@ public static 
double[] randomPercents() { } return percents; } + + @Override + protected void assertPercentile(T agg, Double value) { + assertThat(agg.percentile(value), equalTo(Double.NaN)); + assertThat(agg.percentileAsString(value), equalTo("NaN")); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java index dcbd5cdbd5a3a..ee0e3602f2039 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java @@ -31,6 +31,7 @@ import java.util.List; import java.util.Map; + public class InternalHDRPercentilesRanksTests extends InternalPercentilesRanksTestCase { @Override From 02a4ef38a7960895b4e26111167676ec49302cba Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 18 Jun 2018 16:46:04 +0200 Subject: [PATCH 16/92] Use system context for cluster state update tasks (#31241) This commit makes it so that cluster state update tasks always run under the system context, only restoring the original context when the listener that was provided with the task is called. A notable exception is the clusterStatePublished(...) callback which will still run under system context, because it's defined on the executor-level, and not the task level, and only called once for the combined batch of tasks and can therefore not be uniquely identified with a task / thread context. Relates #30603 --- .../cluster/ClusterStateTaskExecutor.java | 3 + .../cluster/ClusterStateUpdateTask.java | 6 ++ .../cluster/service/MasterService.java | 35 ++++--- .../transport/RemoteClusterConnection.java | 2 - .../cluster/service/MasterServiceTests.java | 82 +++++++++++++++++ .../xpack/core/ml/MlMetadata.java | 13 ++- .../core/ml/datafeed/DatafeedUpdate.java | 9 +- .../core/ml/datafeed/DatafeedUpdateTests.java | 8 +- .../ml/action/TransportDeleteJobAction.java | 3 +- .../ml/action/TransportPutDatafeedAction.java | 14 +-- .../action/TransportUpdateDatafeedAction.java | 6 +- .../xpack/ml/job/JobManager.java | 5 +- .../xpack/ml/MlMetadataTests.java | 92 ++++++++++++++----- .../action/TransportCloseJobActionTests.java | 2 +- .../TransportStartDatafeedActionTests.java | 6 +- .../TransportStopDatafeedActionTests.java | 12 +-- .../ml/datafeed/DatafeedManagerTests.java | 2 +- .../datafeed/DatafeedNodeSelectorTests.java | 23 +++-- .../xpack/ml/integration/DeleteJobIT.java | 5 +- 19 files changed, 236 insertions(+), 92 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java index 024389dd22c7f..6c536a7019bb0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java @@ -41,6 +41,9 @@ default boolean runOnlyOnMaster() { /** * Callback invoked after new cluster state is published. Note that * this method is not invoked if the cluster state was not updated. + * + * Note that this method will be executed using system context. 
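// A minimal sketch (not part of the patch) of the context handling that MasterService adopts in
// this change: the update task is submitted under the system context, but a restorable supplier is
// captured first so that the task's own listener callbacks run under the submitter's original
// context. The method name is illustrative; ThreadContext is
// org.elasticsearch.common.util.concurrent.ThreadContext and Supplier is java.util.function.Supplier.
private void runUnderSystemContext(ThreadContext threadContext, Runnable updateTask, Runnable listenerCallback) {
    final Supplier<ThreadContext.StoredContext> restorer = threadContext.newRestorableContext(false);
    try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
        threadContext.markAsSystemContext();
        updateTask.run();          // executes with an empty, system-marked thread context
    }
    try (ThreadContext.StoredContext ignore = restorer.get()) {
        listenerCallback.run();    // sees the submitter's original headers again
    }
}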
+ * * @param clusterChangedEvent the change event for this cluster state change, containing * both old and new states */ diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java index b298e7e915dea..9dc9c7f6f52d0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java @@ -62,6 +62,12 @@ public String describeTasks(List tasks) { */ public abstract void onFailure(String source, Exception e); + @Override + public final void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { + // final, empty implementation here as this method should only be defined in combination + // with a batching executor as it will always be executed within the system context. + } + /** * If the cluster state update task wasn't processed by the provided timeout, call * {@link ClusterStateTaskListener#onFailure(String, Exception)}. May return null to indicate no timeout is needed (default). diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 4432d864fd36a..2543be4811c1e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.threadpool.ThreadPool; @@ -59,6 +60,7 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; +import java.util.function.Supplier; import java.util.stream.Collectors; import static org.elasticsearch.cluster.service.ClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING; @@ -426,26 +428,28 @@ public TimeValue getMaxTaskWaitTime() { return threadPoolExecutor.getMaxTaskWaitTime(); } - private SafeClusterStateTaskListener safe(ClusterStateTaskListener listener) { + private SafeClusterStateTaskListener safe(ClusterStateTaskListener listener, Supplier contextSupplier) { if (listener instanceof AckedClusterStateTaskListener) { - return new SafeAckedClusterStateTaskListener((AckedClusterStateTaskListener) listener, logger); + return new SafeAckedClusterStateTaskListener((AckedClusterStateTaskListener) listener, contextSupplier, logger); } else { - return new SafeClusterStateTaskListener(listener, logger); + return new SafeClusterStateTaskListener(listener, contextSupplier, logger); } } private static class SafeClusterStateTaskListener implements ClusterStateTaskListener { private final ClusterStateTaskListener listener; + protected final Supplier context; private final Logger logger; - SafeClusterStateTaskListener(ClusterStateTaskListener listener, Logger logger) { + SafeClusterStateTaskListener(ClusterStateTaskListener listener, Supplier context, Logger logger) { this.listener = listener; + this.context = context; this.logger = logger; } @Override public void onFailure(String source, Exception e) { - try { + try (ThreadContext.StoredContext ignore = context.get()) { listener.onFailure(source, e); } catch (Exception 
inner) { inner.addSuppressed(e); @@ -456,7 +460,7 @@ public void onFailure(String source, Exception e) { @Override public void onNoLongerMaster(String source) { - try { + try (ThreadContext.StoredContext ignore = context.get()) { listener.onNoLongerMaster(source); } catch (Exception e) { logger.error(() -> new ParameterizedMessage( @@ -466,7 +470,7 @@ public void onNoLongerMaster(String source) { @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - try { + try (ThreadContext.StoredContext ignore = context.get()) { listener.clusterStateProcessed(source, oldState, newState); } catch (Exception e) { logger.error(() -> new ParameterizedMessage( @@ -480,8 +484,9 @@ private static class SafeAckedClusterStateTaskListener extends SafeClusterStateT private final AckedClusterStateTaskListener listener; private final Logger logger; - SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Logger logger) { - super(listener, logger); + SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Supplier context, + Logger logger) { + super(listener, context, logger); this.listener = listener; this.logger = logger; } @@ -493,7 +498,7 @@ public boolean mustAck(DiscoveryNode discoveryNode) { @Override public void onAllNodesAcked(@Nullable Exception e) { - try { + try (ThreadContext.StoredContext ignore = context.get()) { listener.onAllNodesAcked(e); } catch (Exception inner) { inner.addSuppressed(e); @@ -503,7 +508,7 @@ public void onAllNodesAcked(@Nullable Exception e) { @Override public void onAckTimeout() { - try { + try (ThreadContext.StoredContext ignore = context.get()) { listener.onAckTimeout(); } catch (Exception e) { logger.error("exception thrown by listener while notifying on ack timeout", e); @@ -724,9 +729,13 @@ public void submitStateUpdateTasks(final String source, if (!lifecycle.started()) { return; } - try { + final ThreadContext threadContext = threadPool.getThreadContext(); + final Supplier supplier = threadContext.newRestorableContext(false); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + threadContext.markAsSystemContext(); + List safeTasks = tasks.entrySet().stream() - .map(e -> taskBatcher.new UpdateTask(config.priority(), source, e.getKey(), safe(e.getValue()), executor)) + .map(e -> taskBatcher.new UpdateTask(config.priority(), source, e.getKey(), safe(e.getValue(), supplier), executor)) .collect(Collectors.toList()); taskBatcher.submitTasks(safeTasks, config.timeout()); } catch (EsRejectedExecutionException e) { diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index e37f46c5517db..c86ea61980a87 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -556,7 +556,6 @@ public ClusterStateResponse newInstance() { @Override public void handleResponse(ClusterStateResponse response) { - assert transportService.getThreadPool().getThreadContext().isSystemContext() == false : "context is a system context"; try { if (remoteClusterName.get() == null) { assert response.getClusterName().value() != null; @@ -597,7 +596,6 @@ public void handleResponse(ClusterStateResponse response) { @Override public void handleException(TransportException exp) { - assert transportService.getThreadPool().getThreadContext().isSystemContext() == false : 
"context is a system context"; logger.warn(() -> new ParameterizedMessage("fetching nodes from external cluster {} failed", clusterAlias), exp); try { IOUtils.closeWhileHandlingException(connection); diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index f75363c7ab5c7..20587d31f5359 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -34,12 +34,14 @@ import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.BaseFuture; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; @@ -52,6 +54,7 @@ import org.junit.BeforeClass; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -168,6 +171,85 @@ public void onFailure(String source, Exception e) { nonMaster.close(); } + public void testThreadContext() throws InterruptedException { + final TimedMasterService master = createTimedMasterService(true); + final CountDownLatch latch = new CountDownLatch(1); + + try (ThreadContext.StoredContext ignored = threadPool.getThreadContext().stashContext()) { + final Map expectedHeaders = Collections.singletonMap("test", "test"); + threadPool.getThreadContext().putHeader(expectedHeaders); + + final TimeValue ackTimeout = randomBoolean() ? TimeValue.ZERO : TimeValue.timeValueMillis(randomInt(10000)); + final TimeValue masterTimeout = randomBoolean() ? 
TimeValue.ZERO : TimeValue.timeValueMillis(randomInt(10000)); + + master.submitStateUpdateTask("test", new AckedClusterStateUpdateTask(null, null) { + @Override + public ClusterState execute(ClusterState currentState) { + assertTrue(threadPool.getThreadContext().isSystemContext()); + assertEquals(Collections.emptyMap(), threadPool.getThreadContext().getHeaders()); + + if (randomBoolean()) { + return ClusterState.builder(currentState).build(); + } else if (randomBoolean()) { + return currentState; + } else { + throw new IllegalArgumentException("mock failure"); + } + } + + @Override + public void onFailure(String source, Exception e) { + assertFalse(threadPool.getThreadContext().isSystemContext()); + assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + latch.countDown(); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + assertFalse(threadPool.getThreadContext().isSystemContext()); + assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + latch.countDown(); + } + + @Override + protected Void newResponse(boolean acknowledged) { + return null; + } + + public TimeValue ackTimeout() { + return ackTimeout; + } + + @Override + public TimeValue timeout() { + return masterTimeout; + } + + @Override + public void onAllNodesAcked(@Nullable Exception e) { + assertFalse(threadPool.getThreadContext().isSystemContext()); + assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + latch.countDown(); + } + + @Override + public void onAckTimeout() { + assertFalse(threadPool.getThreadContext().isSystemContext()); + assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + latch.countDown(); + } + + }); + + assertFalse(threadPool.getThreadContext().isSystemContext()); + assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + } + + latch.await(); + + master.close(); + } + /* * test that a listener throwing an exception while handling a * notification does not prevent publication notification to the diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java index 5e145306f8c1f..85e5c99fe3581 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -293,7 +292,7 @@ public Builder deleteJob(String jobId, PersistentTasksCustomMetaData tasks) { return this; } - public Builder putDatafeed(DatafeedConfig datafeedConfig, ThreadContext threadContext) { + public Builder putDatafeed(DatafeedConfig datafeedConfig, Map headers) { if (datafeeds.containsKey(datafeedConfig.getId())) { throw new ResourceAlreadyExistsException("A datafeed with id [" + datafeedConfig.getId() + "] already exists"); } @@ -302,13 +301,13 @@ public Builder putDatafeed(DatafeedConfig datafeedConfig, ThreadContext threadCo Job job = jobs.get(jobId); DatafeedJobValidator.validate(datafeedConfig, job); - if (threadContext != null) { + if 
(headers.isEmpty() == false) { // Adjust the request, adding security headers from the current thread context DatafeedConfig.Builder builder = new DatafeedConfig.Builder(datafeedConfig); - Map headers = threadContext.getHeaders().entrySet().stream() + Map securityHeaders = headers.entrySet().stream() .filter(e -> ClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - builder.setHeaders(headers); + builder.setHeaders(securityHeaders); datafeedConfig = builder.build(); } @@ -328,7 +327,7 @@ private void checkJobIsAvailableForDatafeed(String jobId) { } } - public Builder updateDatafeed(DatafeedUpdate update, PersistentTasksCustomMetaData persistentTasks, ThreadContext threadContext) { + public Builder updateDatafeed(DatafeedUpdate update, PersistentTasksCustomMetaData persistentTasks, Map headers) { String datafeedId = update.getId(); DatafeedConfig oldDatafeedConfig = datafeeds.get(datafeedId); if (oldDatafeedConfig == null) { @@ -336,7 +335,7 @@ public Builder updateDatafeed(DatafeedUpdate update, PersistentTasksCustomMetaDa } checkDatafeedIsStopped(() -> Messages.getMessage(Messages.DATAFEED_CANNOT_UPDATE_IN_CURRENT_STATE, datafeedId, DatafeedState.STARTED), datafeedId, persistentTasks); - DatafeedConfig newDatafeedConfig = update.apply(oldDatafeedConfig, threadContext); + DatafeedConfig newDatafeedConfig = update.apply(oldDatafeedConfig, headers); if (newDatafeedConfig.getJobId().equals(oldDatafeedConfig.getJobId()) == false) { checkJobIsAvailableForDatafeed(newDatafeedConfig.getJobId()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java index 444532a7e3f15..27498bd1549ee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -264,7 +263,7 @@ ChunkingConfig getChunkingConfig() { * Applies the update to the given {@link DatafeedConfig} * @return a new {@link DatafeedConfig} that contains the update */ - public DatafeedConfig apply(DatafeedConfig datafeedConfig, ThreadContext threadContext) { + public DatafeedConfig apply(DatafeedConfig datafeedConfig, Map headers) { if (id.equals(datafeedConfig.getId()) == false) { throw new IllegalArgumentException("Cannot apply update to datafeedConfig with different id"); } @@ -301,12 +300,12 @@ public DatafeedConfig apply(DatafeedConfig datafeedConfig, ThreadContext threadC builder.setChunkingConfig(chunkingConfig); } - if (threadContext != null) { + if (headers.isEmpty() == false) { // Adjust the request, adding security headers from the current thread context - Map headers = threadContext.getHeaders().entrySet().stream() + Map securityHeaders = headers.entrySet().stream() .filter(e -> ClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - builder.setHeaders(headers); + 
builder.setHeaders(securityHeaders); } return builder.build(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index d059e567d1588..358f9d1c97bd7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -114,7 +114,7 @@ public void testApply_failBecauseTargetDatafeedHasDifferentId() { public void testApply_givenEmptyUpdate() { DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); - DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build().apply(datafeed, null); + DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build().apply(datafeed, Collections.emptyMap()); assertThat(datafeed, equalTo(updatedDatafeed)); } @@ -125,7 +125,7 @@ public void testApply_givenPartialUpdate() { DatafeedUpdate.Builder updated = new DatafeedUpdate.Builder(datafeed.getId()); updated.setScrollSize(datafeed.getScrollSize() + 1); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, null); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); DatafeedConfig.Builder expectedDatafeed = new DatafeedConfig.Builder(datafeed); expectedDatafeed.setScrollSize(datafeed.getScrollSize() + 1); @@ -149,7 +149,7 @@ public void testApply_givenFullUpdateNoAggregations() { update.setScrollSize(8000); update.setChunkingConfig(ChunkingConfig.newManual(TimeValue.timeValueHours(1))); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, null); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); assertThat(updatedDatafeed.getJobId(), equalTo("bar")); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_2"))); @@ -175,7 +175,7 @@ public void testApply_givenAggregations() { update.setAggregations(new AggregatorFactories.Builder().addAggregator( AggregationBuilders.histogram("a").interval(300000).field("time").subAggregation(maxTime))); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, null); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_1"))); assertThat(updatedDatafeed.getTypes(), equalTo(Collections.singletonList("t_1"))); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index 81f4a90f575af..ede92fbbab950 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -213,7 +212,7 @@ public void onFailure(String source, Exception e) { } @Override - public void 
clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { logger.debug("Job [" + jobId + "] is successfully marked as deleted"); listener.onResponse(true); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java index 08a9dfb09c1d9..88c72578023f9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.core.security.support.Exceptions; import java.io.IOException; +import java.util.Map; public class TransportPutDatafeedAction extends TransportMasterNodeAction { @@ -95,7 +96,7 @@ protected void masterOperation(PutDatafeedAction.Request request, ClusterState s client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); } else { - putDatafeed(request, listener); + putDatafeed(request, threadPool.getThreadContext().getHeaders(), listener); } } @@ -103,7 +104,7 @@ private void handlePrivsResponse(String username, PutDatafeedAction.Request requ HasPrivilegesResponse response, ActionListener listener) throws IOException { if (response.isCompleteMatch()) { - putDatafeed(request, listener); + putDatafeed(request, threadPool.getThreadContext().getHeaders(), listener); } else { XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); @@ -120,7 +121,8 @@ private void handlePrivsResponse(String username, PutDatafeedAction.Request requ } } - private void putDatafeed(PutDatafeedAction.Request request, ActionListener listener) { + private void putDatafeed(PutDatafeedAction.Request request, Map headers, + ActionListener listener) { clusterService.submitStateUpdateTask( "put-datafeed-" + request.getDatafeed().getId(), @@ -136,16 +138,16 @@ protected PutDatafeedAction.Response newResponse(boolean acknowledged) { @Override public ClusterState execute(ClusterState currentState) { - return putDatafeed(request, currentState); + return putDatafeed(request, headers, currentState); } }); } - private ClusterState putDatafeed(PutDatafeedAction.Request request, ClusterState clusterState) { + private ClusterState putDatafeed(PutDatafeedAction.Request request, Map headers, ClusterState clusterState) { XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); MlMetadata currentMetadata = MlMetadata.getMlMetadata(clusterState); MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) - .putDatafeed(request.getDatafeed(), threadPool.getThreadContext()).build(); + .putDatafeed(request.getDatafeed(), headers).build(); return ClusterState.builder(clusterState).metaData( MetaData.builder(clusterState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build()) .build(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java index 4d752fe294081..4e43cbb185330 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java @@ -27,6 +27,8 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate; 
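// A minimal sketch (not part of the patch) of the ordering that TransportUpdateDatafeedAction
// relies on below: the caller's headers are read on the request thread, because execute() later
// runs under the system context where getHeaders() no longer returns them. Method and variable
// names are illustrative; ClusterService, ClusterState, ClusterStateUpdateTask, ThreadPool and Map
// refer to the usual Elasticsearch/JDK classes.
void submitWithCallerHeaders(ClusterService clusterService, ThreadPool threadPool) {
    final Map<String, String> callerHeaders = threadPool.getThreadContext().getHeaders(); // request thread
    clusterService.submitStateUpdateTask("update-datafeed", new ClusterStateUpdateTask() {
        @Override
        public ClusterState execute(ClusterState currentState) {
            // system context here: use callerHeaders rather than re-reading the thread context
            return currentState;
        }

        @Override
        public void onFailure(String source, Exception e) {
            // illustrative no-op
        }
    });
}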
import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import java.util.Map; + public class TransportUpdateDatafeedAction extends TransportMasterNodeAction { @Inject @@ -50,6 +52,8 @@ protected PutDatafeedAction.Response newResponse() { @Override protected void masterOperation(UpdateDatafeedAction.Request request, ClusterState state, ActionListener listener) { + final Map headers = threadPool.getThreadContext().getHeaders(); + clusterService.submitStateUpdateTask("update-datafeed-" + request.getUpdate().getId(), new AckedClusterStateUpdateTask(request, listener) { private volatile DatafeedConfig updatedDatafeed; @@ -69,7 +73,7 @@ public ClusterState execute(ClusterState currentState) { PersistentTasksCustomMetaData persistentTasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) - .updateDatafeed(update, persistentTasks, threadPool.getThreadContext()).build(); + .updateDatafeed(update, persistentTasks, headers).build(); updatedDatafeed = newMetadata.getDatafeed(update.getId()); return ClusterState.builder(currentState).metaData( MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build()).build(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index 1fd73b96667b2..fe6deea55e3aa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.MetaData; @@ -347,8 +346,8 @@ public void onFailure(String source, Exception e) { } @Override - public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { - afterClusterStateUpdate(clusterChangedEvent.state(), request); + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + afterClusterStateUpdate(newState, request); actionListener.onResponse(new PutJobAction.Response(updatedJob.get())); } }); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java index f6fb2db3c9bb9..ecfe712858331 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java @@ -30,9 +30,11 @@ import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.config.JobTests; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField; import java.util.Collections; import java.util.Date; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; @@ -42,6 +44,7 @@ import static org.elasticsearch.xpack.ml.datafeed.DatafeedManagerTests.createDatafeedJob; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static 
org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; @@ -63,7 +66,7 @@ protected MlMetadata createTestInstance() { } job = new Job.Builder(job).setAnalysisConfig(analysisConfig).build(); builder.putJob(job, false); - builder.putDatafeed(datafeedConfig, null); + builder.putDatafeed(datafeedConfig, Collections.emptyMap()); } else { builder.putJob(job, false); } @@ -164,7 +167,7 @@ public void testRemoveJob_failDatafeedRefersToJob() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder.deleteJob(job1.getId(), new PersistentTasksCustomMetaData(0L, Collections.emptyMap()))); @@ -184,7 +187,7 @@ public void testCrudDatafeed() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); MlMetadata result = builder.build(); assertThat(result.getJobs().get("job_id"), sameInstance(job1)); @@ -201,7 +204,7 @@ public void testPutDatafeed_failBecauseJobDoesNotExist() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", "missing-job").build(); MlMetadata.Builder builder = new MlMetadata.Builder(); - expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, null)); + expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap())); } public void testPutDatafeed_failBecauseJobIsBeingDeleted() { @@ -210,7 +213,7 @@ public void testPutDatafeed_failBecauseJobIsBeingDeleted() { MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, null)); + expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap())); } public void testPutDatafeed_failBecauseDatafeedIdIsAlreadyTaken() { @@ -218,9 +221,9 @@ public void testPutDatafeed_failBecauseDatafeedIdIsAlreadyTaken() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); - expectThrows(ResourceAlreadyExistsException.class, () -> builder.putDatafeed(datafeedConfig1, null)); + expectThrows(ResourceAlreadyExistsException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap())); } public void testPutDatafeed_failBecauseJobAlreadyHasDatafeed() { @@ -229,10 +232,10 @@ public void testPutDatafeed_failBecauseJobAlreadyHasDatafeed() { DatafeedConfig datafeedConfig2 = createDatafeedConfig("datafeed2", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> builder.putDatafeed(datafeedConfig2, null)); + () -> 
builder.putDatafeed(datafeedConfig2, Collections.emptyMap())); assertThat(e.status(), equalTo(RestStatus.CONFLICT)); } @@ -246,7 +249,23 @@ public void testPutDatafeed_failBecauseJobIsNotCompatibleForDatafeed() { MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1.build(now), false); - expectThrows(ElasticsearchStatusException.class, () -> builder.putDatafeed(datafeedConfig1, null)); + expectThrows(ElasticsearchStatusException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap())); + } + + public void testPutDatafeed_setsSecurityHeaders() { + Job datafeedJob = createDatafeedJob().build(new Date()); + DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", datafeedJob.getId()).build(); + MlMetadata.Builder builder = new MlMetadata.Builder(); + builder.putJob(datafeedJob, false); + + Map headers = new HashMap<>(); + headers.put("unrelated_header", "unrelated_header_value"); + headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user"); + builder.putDatafeed(datafeedConfig, headers); + MlMetadata metadata = builder.build(); + assertThat(metadata.getDatafeed("datafeed1").getHeaders().size(), equalTo(1)); + assertThat(metadata.getDatafeed("datafeed1").getHeaders(), + hasEntry(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user")); } public void testUpdateDatafeed() { @@ -254,12 +273,13 @@ public void testUpdateDatafeed() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); MlMetadata beforeMetadata = builder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId()); update.setScrollSize(5000); - MlMetadata updatedMetadata = new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null).build(); + MlMetadata updatedMetadata = + new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, Collections.emptyMap()).build(); DatafeedConfig updatedDatafeed = updatedMetadata.getDatafeed(datafeedConfig1.getId()); assertThat(updatedDatafeed.getJobId(), equalTo(datafeedConfig1.getJobId())); @@ -271,7 +291,8 @@ public void testUpdateDatafeed() { public void testUpdateDatafeed_failBecauseDatafeedDoesNotExist() { DatafeedUpdate.Builder update = new DatafeedUpdate.Builder("job_id"); update.setScrollSize(5000); - expectThrows(ResourceNotFoundException.class, () -> new MlMetadata.Builder().updateDatafeed(update.build(), null, null).build()); + expectThrows(ResourceNotFoundException.class, + () -> new MlMetadata.Builder().updateDatafeed(update.build(), null, Collections.emptyMap()).build()); } public void testUpdateDatafeed_failBecauseDatafeedIsNotStopped() { @@ -279,7 +300,7 @@ public void testUpdateDatafeed_failBecauseDatafeedIsNotStopped() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); MlMetadata beforeMetadata = builder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -300,14 +321,14 @@ public void testUpdateDatafeed_failBecauseNewJobIdDoesNotExist() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", 
job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); MlMetadata beforeMetadata = builder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId()); update.setJobId(job1.getId() + "_2"); expectThrows(ResourceNotFoundException.class, - () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null)); + () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, Collections.emptyMap())); } public void testUpdateDatafeed_failBecauseNewJobHasAnotherDatafeedAttached() { @@ -319,25 +340,46 @@ public void testUpdateDatafeed_failBecauseNewJobHasAnotherDatafeedAttached() { MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); builder.putJob(job2.build(), false); - builder.putDatafeed(datafeedConfig1, null); - builder.putDatafeed(datafeedConfig2, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); + builder.putDatafeed(datafeedConfig2, Collections.emptyMap()); MlMetadata beforeMetadata = builder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId()); update.setJobId(job2.getId()); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null)); + () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, Collections.emptyMap())); assertThat(e.status(), equalTo(RestStatus.CONFLICT)); assertThat(e.getMessage(), equalTo("A datafeed [datafeed2] already exists for job [job_id_2]")); } + public void testUpdateDatafeed_setsSecurityHeaders() { + Job datafeedJob = createDatafeedJob().build(new Date()); + DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", datafeedJob.getId()).build(); + MlMetadata.Builder builder = new MlMetadata.Builder(); + builder.putJob(datafeedJob, false); + builder.putDatafeed(datafeedConfig, Collections.emptyMap()); + MlMetadata beforeMetadata = builder.build(); + assertTrue(beforeMetadata.getDatafeed("datafeed1").getHeaders().isEmpty()); + + DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig.getId()); + update.setQueryDelay(TimeValue.timeValueMinutes(5)); + + Map headers = new HashMap<>(); + headers.put("unrelated_header", "unrelated_header_value"); + headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user"); + MlMetadata afterMetadata = new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, headers).build(); + Map updatedHeaders = afterMetadata.getDatafeed("datafeed1").getHeaders(); + assertThat(updatedHeaders.size(), equalTo(1)); + assertThat(updatedHeaders, hasEntry(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user")); + } + public void testRemoveDatafeed_failBecauseDatafeedStarted() { Job job1 = createDatafeedJob().build(new Date()); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); MlMetadata result = builder.build(); assertThat(result.getJobs().get("job_id"), sameInstance(job1)); @@ -378,9 +420,9 @@ public void testExpandJobIds() { public void testExpandDatafeedIds() { MlMetadata.Builder 
mlMetadataBuilder = newMlMetadataWithJobs("bar-1", "foo-1", "foo-2"); - mlMetadataBuilder.putDatafeed(createDatafeedConfig("bar-1-feed", "bar-1").build(), null); - mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-1-feed", "foo-1").build(), null); - mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-2-feed", "foo-2").build(), null); + mlMetadataBuilder.putDatafeed(createDatafeedConfig("bar-1-feed", "bar-1").build(), Collections.emptyMap()); + mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-1-feed", "foo-1").build(), Collections.emptyMap()); + mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-2-feed", "foo-2").build(), Collections.emptyMap()); MlMetadata mlMetadata = mlMetadataBuilder.build(); @@ -409,7 +451,7 @@ protected MlMetadata mutateInstance(MlMetadata instance) { metadataBuilder.putJob(entry.getValue(), true); } for (Map.Entry entry : datafeeds.entrySet()) { - metadataBuilder.putDatafeed(entry.getValue(), null); + metadataBuilder.putDatafeed(entry.getValue(), Collections.emptyMap()); } switch (between(0, 1)) { @@ -430,7 +472,7 @@ protected MlMetadata mutateInstance(MlMetadata instance) { } randomJob = new Job.Builder(randomJob).setAnalysisConfig(analysisConfig).build(); metadataBuilder.putJob(randomJob, false); - metadataBuilder.putDatafeed(datafeedConfig, null); + metadataBuilder.putDatafeed(datafeedConfig, Collections.emptyMap()); break; default: throw new AssertionError("Illegal randomisation branch"); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java index d65fc1476e75e..0e7ad29c54da9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java @@ -51,7 +51,7 @@ public void testValidate_datafeedIsStarted() { MlMetadata.Builder mlBuilder = new MlMetadata.Builder(); mlBuilder.putJob(BaseMlIntegTestCase.createScheduledJob("job_id").build(new Date()), false); mlBuilder.putDatafeed(BaseMlIntegTestCase.createDatafeed("datafeed_id", "job_id", - Collections.singletonList("*")), null); + Collections.singletonList("*")), Collections.emptyMap()); final PersistentTasksCustomMetaData.Builder startDataFeedTaskBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", null, JobState.OPENED, startDataFeedTaskBuilder); addTask("datafeed_id", 0L, null, DatafeedState.STARTED, startDataFeedTaskBuilder); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java index af9446ed972cb..72c8d361dd882 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java @@ -45,7 +45,7 @@ public void testValidate_jobClosed() { PersistentTasksCustomMetaData tasks = PersistentTasksCustomMetaData.builder().build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1) - .putDatafeed(datafeedConfig1, null) + .putDatafeed(datafeedConfig1, Collections.emptyMap()) .build(); Exception e = 
expectThrows(ElasticsearchStatusException.class, () -> TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks)); @@ -62,7 +62,7 @@ public void testValidate_jobOpening() { PersistentTasksCustomMetaData tasks = tasksBuilder.build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1) - .putDatafeed(datafeedConfig1, null) + .putDatafeed(datafeedConfig1, Collections.emptyMap()) .build(); TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks); @@ -78,7 +78,7 @@ public void testValidate_jobOpened() { PersistentTasksCustomMetaData tasks = tasksBuilder.build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1) - .putDatafeed(datafeedConfig1, null) + .putDatafeed(datafeedConfig1, Collections.emptyMap()) .build(); TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java index 55a0f4006bcdd..934642986de96 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java @@ -42,7 +42,7 @@ public void testValidate() { DatafeedConfig datafeedConfig = createDatafeedConfig("foo", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder().putJob(job, false) - .putDatafeed(datafeedConfig, null) + .putDatafeed(datafeedConfig, Collections.emptyMap()) .build(); TransportStopDatafeedAction.validateDatafeedTask("foo", mlMetadata2); } @@ -54,12 +54,12 @@ public void testResolveDataFeedIds_GivenDatafeedId() { addTask("datafeed_1", 0L, "node-1", DatafeedState.STARTED, tasksBuilder); Job job = BaseMlIntegTestCase.createScheduledJob("job_id_1").build(new Date()); DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed_1", "job_id_1").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap()); addTask("datafeed_2", 0L, "node-1", DatafeedState.STOPPED, tasksBuilder); job = BaseMlIntegTestCase.createScheduledJob("job_id_2").build(new Date()); datafeedConfig = createDatafeedConfig("datafeed_2", "job_id_2").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap()); PersistentTasksCustomMetaData tasks = tasksBuilder.build(); MlMetadata mlMetadata = mlMetadataBuilder.build(); @@ -86,17 +86,17 @@ public void testResolveDataFeedIds_GivenAll() { addTask("datafeed_1", 0L, "node-1", DatafeedState.STARTED, tasksBuilder); Job job = BaseMlIntegTestCase.createScheduledJob("job_id_1").build(new Date()); DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed_1", "job_id_1").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap()); addTask("datafeed_2", 0L, "node-1", DatafeedState.STOPPED, tasksBuilder); job = BaseMlIntegTestCase.createScheduledJob("job_id_2").build(new Date()); datafeedConfig = 
createDatafeedConfig("datafeed_2", "job_id_2").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap()); addTask("datafeed_3", 0L, "node-1", DatafeedState.STOPPING, tasksBuilder); job = BaseMlIntegTestCase.createScheduledJob("job_id_3").build(new Date()); datafeedConfig = createDatafeedConfig("datafeed_3", "job_id_3").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap()); PersistentTasksCustomMetaData tasks = tasksBuilder.build(); MlMetadata mlMetadata = mlMetadataBuilder.build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java index f609f0c8c5ed9..6ce03d22b64f0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java @@ -84,7 +84,7 @@ public void setUpTests() { Job job = createDatafeedJob().build(new Date()); mlMetadata.putJob(job, false); DatafeedConfig datafeed = createDatafeedConfig("datafeed_id", job.getId()).build(); - mlMetadata.putDatafeed(datafeed, null); + mlMetadata.putDatafeed(datafeed, Collections.emptyMap()); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask(job.getId(), "node_id", JobState.OPENED, tasksBuilder); PersistentTasksCustomMetaData tasks = tasksBuilder.build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java index 96ae3b5ef38b6..f3fa804bb27b9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java @@ -68,7 +68,7 @@ public void testSelectNode_GivenJobIsOpened() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -86,7 +86,7 @@ public void testSelectNode_GivenJobIsOpening() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -106,7 +106,7 @@ public void testNoJobTask() { mlMetadataBuilder.putJob(job, false); // Using wildcard index name to test for index resolving as well - 
mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); tasks = PersistentTasksCustomMetaData.builder().build(); @@ -128,7 +128,7 @@ public void testSelectNode_GivenJobFailedOrClosed() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -156,7 +156,7 @@ public void testShardUnassigned() { mlMetadataBuilder.putJob(job, false); // Using wildcard index name to test for index resolving as well - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -182,7 +182,7 @@ public void testShardNotAllActive() { mlMetadataBuilder.putJob(job, false); // Using wildcard index name to test for index resolving as well - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -207,7 +207,8 @@ public void testIndexDoesntExist() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), + Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -231,7 +232,8 @@ public void testRemoteIndex() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("remote:foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("remote:foo")), + Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -248,7 +250,7 @@ public void testSelectNode_jobTaskStale() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - 
mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); String nodeId = randomBoolean() ? "node_id2" : null; @@ -286,7 +288,8 @@ public void testSelectNode_GivenJobOpeningAndIndexDoesNotExist() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), + Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java index 357c2bc232552..14ec4813a749e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.MetaData; @@ -47,7 +46,7 @@ public void onFailure(String source, Exception e) { } @Override - public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { markAsDeletedLatch.countDown(); } }); @@ -90,7 +89,7 @@ public void onFailure(String source, Exception e) { } @Override - public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { removeJobLatch.countDown(); } }); From 3f5ebb862de56efed4edcdb8b338590cb54be6f5 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Mon, 18 Jun 2018 08:21:41 -0700 Subject: [PATCH 17/92] Clarify that IP range data can be specified in CIDR notation. 
(#31374) --- docs/reference/mapping/types/range.asciidoc | 24 +++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/docs/reference/mapping/types/range.asciidoc b/docs/reference/mapping/types/range.asciidoc index 3013204e4ca21..a7ab6346176cb 100644 --- a/docs/reference/mapping/types/range.asciidoc +++ b/docs/reference/mapping/types/range.asciidoc @@ -168,6 +168,30 @@ This query produces a similar result: -------------------------------------------------- // TESTRESPONSE[s/"took": 13/"took" : $body.took/] +[[ip-range]] +==== IP Range + +In addition to the range format above, IP ranges can be provided in +https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing#CIDR_notation[CIDR] notation: + +[source,js] +-------------------------------------------------- +PUT range_index/_mapping/_doc +{ + "properties": { + "ip_whitelist": { + "type": "ip_range" + } + } +} + +PUT range_index/_doc/2 +{ + "ip_whitelist" : "192.168.0.0/16" +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:range_index] [[range-params]] ==== Parameters for range fields From ea92864eb179cb68af3f2a774713527f493f550b Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 18 Jun 2018 08:48:23 -0700 Subject: [PATCH 18/92] [DOCS] Adds testing for security APIs (#31345) --- x-pack/docs/build.gradle | 4 ---- .../en/rest-api/security/authenticate.asciidoc | 14 ++++++++------ x-pack/docs/en/rest-api/security/ssl.asciidoc | 1 + x-pack/docs/en/rest-api/security/tokens.asciidoc | 1 + x-pack/docs/en/rest-api/security/users.asciidoc | 1 + 5 files changed, 11 insertions(+), 10 deletions(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 5bff371d9c26a..ed70fcd44a7f2 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -16,9 +16,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/ml/functions/rare.asciidoc', 'en/ml/functions/sum.asciidoc', 'en/ml/functions/time.asciidoc', - 'en/rest-api/security/ssl.asciidoc', - 'en/rest-api/security/users.asciidoc', - 'en/rest-api/security/tokens.asciidoc', 'en/rest-api/watcher/put-watch.asciidoc', 'en/security/authentication/user-cache.asciidoc', 'en/security/authorization/field-and-document-access-control.asciidoc', @@ -76,7 +73,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/ml/update-snapshot.asciidoc', 'en/rest-api/ml/validate-detector.asciidoc', 'en/rest-api/ml/validate-job.asciidoc', - 'en/rest-api/security/authenticate.asciidoc', 'en/rest-api/watcher/stats.asciidoc', 'en/watcher/example-watches/watching-time-series-data.asciidoc', ] diff --git a/x-pack/docs/en/rest-api/security/authenticate.asciidoc b/x-pack/docs/en/rest-api/security/authenticate.asciidoc index ba837ddfd2c20..ab259762332f9 100644 --- a/x-pack/docs/en/rest-api/security/authenticate.asciidoc +++ b/x-pack/docs/en/rest-api/security/authenticate.asciidoc @@ -35,12 +35,14 @@ The following example output provides information about the "rdeniro" user: -------------------------------------------------- { "username": "rdeniro", - "roles": [ - "admin", - "kibana4" + "roles": [ + "admin" ], - "metadata" : { - "employee_id": "8675309" - } + "full_name": null, + "email": null, + "metadata": { }, + "enabled": true } -------------------------------------------------- +// TESTRESPONSE[s/"rdeniro"/"$body.username"/] +// TESTRESPONSE[s/"admin"/"superuser"/] \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/security/ssl.asciidoc b/x-pack/docs/en/rest-api/security/ssl.asciidoc index f7a40c6d87607..6462699570fb0 100644 --- 
a/x-pack/docs/en/rest-api/security/ssl.asciidoc +++ b/x-pack/docs/en/rest-api/security/ssl.asciidoc @@ -109,3 +109,4 @@ The API returns the following results: } ] ---- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/security/tokens.asciidoc b/x-pack/docs/en/rest-api/security/tokens.asciidoc index 70f255ead37c0..f991a5c0cb836 100644 --- a/x-pack/docs/en/rest-api/security/tokens.asciidoc +++ b/x-pack/docs/en/rest-api/security/tokens.asciidoc @@ -98,6 +98,7 @@ by the value of the `access_token`. -------------------------------------------------- curl -H "Authorization: Bearer dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==" http://localhost:9200/_cluster/health -------------------------------------------------- +// NOTCONSOLE [[security-api-refresh-token]] To extend the life of an existing token, the token api may be called again with the refresh diff --git a/x-pack/docs/en/rest-api/security/users.asciidoc b/x-pack/docs/en/rest-api/security/users.asciidoc index 926193481afbc..c84da5c7d75ff 100644 --- a/x-pack/docs/en/rest-api/security/users.asciidoc +++ b/x-pack/docs/en/rest-api/security/users.asciidoc @@ -115,6 +115,7 @@ authenticated. For example: -------------------------------------------------- curl -u jacknich:j@rV1s http://localhost:9200/_cluster/health -------------------------------------------------- +// NOTCONSOLE [[security-api-get-user]] To retrieve a native user, submit a GET request to the `/_xpack/security/user/` From 73549281e8443a0b579b3f606a41b720465d8054 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 18 Jun 2018 12:06:42 -0400 Subject: [PATCH 19/92] Docs: Use the default distribution to test docs (#31251) This switches the docs tests from the `oss-zip` distribution to the `zip` distribution so they have xpack installed and configured with the default basic license. The goal is to be able to merge the `x-pack/docs` directory into the `docs` directory, marking the x-pack docs with some kind of marker. This is the first step in that process. This also enables `-Dtests.distribution` support for the `docs` directory so you can run the tests against the `oss-zip` distribution with something like ``` ./gradlew -p docs check -Dtests.distribution=oss-zip ``` We can set up Jenkins to run both. 
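For reference, the distribution pick added to DocsTestPlugin below reduces to a plain system-property lookup with a `zip` default. A minimal sketch of that logic, with an invented class name and written as standalone Java rather than the Gradle/Groovy in the diff:

```java
public class DistributionFlavor {
    public static void main(String[] args) {
        // Honour -Dtests.distribution when given, otherwise fall back to the default "zip" distribution.
        String distribution = System.getProperty("tests.distribution", "zip");
        // Only oss-* distributions map to the "oss" build flavor; everything else is "default".
        String buildFlavor = distribution.startsWith("oss-") ? "oss" : "default";
        System.out.println(distribution + " -> " + buildFlavor);
    }
}
```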
Relates to #30665 --- .../gradle/doc/DocsTestPlugin.groovy | 4 +++ docs/Versions.asciidoc | 1 + docs/build.gradle | 1 - docs/plugins/discovery-azure-classic.asciidoc | 2 +- docs/reference/cat/nodeattrs.asciidoc | 25 +++++++++++++++--- docs/reference/cat/templates.asciidoc | 7 +++++ docs/reference/cat/thread_pool.asciidoc | 25 +++++++++++------- docs/reference/cluster/nodes-info.asciidoc | 4 +-- docs/reference/cluster/stats.asciidoc | 26 +++++++++---------- .../setup/install/check-running.asciidoc | 2 +- 10 files changed, 66 insertions(+), 31 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy index f674dbd33cdfd..27f122b8610ee 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy @@ -32,6 +32,8 @@ public class DocsTestPlugin extends RestTestPlugin { public void apply(Project project) { project.pluginManager.apply('elasticsearch.standalone-rest-test') super.apply(project) + // The distribution can be configured with -Dtests.distribution on the command line + project.integTestCluster.distribution = System.getProperty('tests.distribution', 'zip') // Docs are published separately so no need to assemble project.tasks.remove(project.assemble) project.build.dependsOn.remove('assemble') @@ -43,6 +45,8 @@ public class DocsTestPlugin extends RestTestPlugin { '\\{version\\}': VersionProperties.elasticsearch.toString().replace('-SNAPSHOT', ''), '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''), + '\\{build_flavor\\}' : + project.integTestCluster.distribution.startsWith('oss-') ? 'oss' : 'default', ] Task listSnippets = project.tasks.create('listSnippets', SnippetsTask) listSnippets.group 'Docs' diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index 608d5b9288e6e..948a3387f0350 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -5,6 +5,7 @@ :branch: master :jdk: 1.8.0_131 :jdk_major: 8 +:build_flavor: default ////////// release-state can be: released | prerelease | unreleased diff --git a/docs/build.gradle b/docs/build.gradle index 6498de0218120..f1d1324192b16 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -20,7 +20,6 @@ apply plugin: 'elasticsearch.docs-test' integTestCluster { - distribution = 'oss-zip' /* Enable regexes in painless so our tests don't complain about example * snippets that use them. 
*/ setting 'script.painless.regex.enabled', 'true' diff --git a/docs/plugins/discovery-azure-classic.asciidoc b/docs/plugins/discovery-azure-classic.asciidoc index c56991b8f507f..1c1925de878aa 100644 --- a/docs/plugins/discovery-azure-classic.asciidoc +++ b/docs/plugins/discovery-azure-classic.asciidoc @@ -372,7 +372,7 @@ This command should give you a JSON result: "cluster_uuid" : "AT69_T_DTp-1qgIJlatQqA", "version" : { "number" : "{version}", - "build_flavor" : "oss", + "build_flavor" : "{build_flavor}", "build_type" : "zip", "build_hash" : "f27399d", "build_date" : "2016-03-30T09:51:41.449Z", diff --git a/docs/reference/cat/nodeattrs.asciidoc b/docs/reference/cat/nodeattrs.asciidoc index 196f142cc35e1..6c474c2117943 100644 --- a/docs/reference/cat/nodeattrs.asciidoc +++ b/docs/reference/cat/nodeattrs.asciidoc @@ -9,15 +9,23 @@ For example: GET /_cat/nodeattrs?v -------------------------------------------------- // CONSOLE +// TEST[s/\?v/\?v&s=node,attr/] +// Sort the resulting attributes so we can assert on them more easilly Could look like: [source,txt] -------------------------------------------------- node host ip attr value -EK_AsJb 127.0.0.1 127.0.0.1 testattr test +... +node-0 127.0.0.1 127.0.0.1 testattr test +... -------------------------------------------------- -// TESTRESPONSE[s/EK_AsJb/.+/ _cat] +// TESTRESPONSE[s/\.\.\.\n$/\n(.+ xpack\\.installed true\n)?\n/] +// TESTRESPONSE[s/\.\.\.\n/(.+ ml\\..+\n)*/ _cat] +// If xpack is not installed then neither ... with match anything +// If xpack is installed then the first ... contains ml attributes +// and the second contains xpack.installed=true The first few columns (`node`, `host`, `ip`) give you basic info per node and the `attr` and `value` columns give you the custom node attributes, @@ -46,15 +54,24 @@ mode (`v`). The header name will match the supplied value (e.g., GET /_cat/nodeattrs?v&h=name,pid,attr,value -------------------------------------------------- // CONSOLE +// TEST[s/,value/,value&s=node,attr/] +// Sort the resulting attributes so we can assert on them more easilly Might look like: [source,txt] -------------------------------------------------- name pid attr value -EK_AsJb 19566 testattr test +... +node-0 19566 testattr test +... -------------------------------------------------- -// TESTRESPONSE[s/EK_AsJb/.+/ s/19566/\\d*/ _cat] +// TESTRESPONSE[s/19566/\\d*/] +// TESTRESPONSE[s/\.\.\.\n$/\n(.+ xpack\\.installed true\n)?\n/] +// TESTRESPONSE[s/\.\.\.\n/(.+ ml\\..+\n)*/ _cat] +// If xpack is not installed then neither ... with match anything +// If xpack is installed then the first ... contains ml attributes +// and the second contains xpack.installed=true [cols="<,<,<,<,<",options="header",subs="normal"] |======================================================================= diff --git a/docs/reference/cat/templates.asciidoc b/docs/reference/cat/templates.asciidoc index bc221d13552c0..076e84b72b5d3 100644 --- a/docs/reference/cat/templates.asciidoc +++ b/docs/reference/cat/templates.asciidoc @@ -8,9 +8,16 @@ The `templates` command provides information about existing templates. GET /_cat/templates?v&s=name -------------------------------------------------- // CONSOLE +// TEST[s/templates/templates\/template*/] // TEST[s/^/PUT _template\/template0\n{"index_patterns": "te*", "order": 0}\n/] // TEST[s/^/PUT _template\/template1\n{"index_patterns": "tea*", "order": 1}\n/] // TEST[s/^/PUT _template\/template2\n{"index_patterns": "teak*", "order": 2, "version": 7}\n/] +// The substitions do two things: +// 1. 
Filter the response to just templates matching the te* pattern +// so that we only get the templates we expect regardless of which +// templates exist. If xpack is installed there will be unexpected +// templates. +// 2. Create some templates to expect in the response. which looks like diff --git a/docs/reference/cat/thread_pool.asciidoc b/docs/reference/cat/thread_pool.asciidoc index 306650feb958b..9528b7829e37f 100644 --- a/docs/reference/cat/thread_pool.asciidoc +++ b/docs/reference/cat/thread_pool.asciidoc @@ -18,18 +18,19 @@ node-0 analyze 0 0 0 node-0 fetch_shard_started 0 0 0 node-0 fetch_shard_store 0 0 0 node-0 flush 0 0 0 -node-0 force_merge 0 0 0 -node-0 generic 0 0 0 -node-0 get 0 0 0 -node-0 listener 0 0 0 -node-0 management 1 0 0 -node-0 refresh 0 0 0 -node-0 search 0 0 0 -node-0 snapshot 0 0 0 -node-0 warmer 0 0 0 +... node-0 write 0 0 0 -------------------------------------------------- +// TESTRESPONSE[s/\.\.\./(node-0 .+ 0 0 0\n)+/] // TESTRESPONSE[s/\d+/\\d+/ _cat] +// The substitutions do two things: +// 1. Expect any number of extra thread pools. This allows us to only list a +// few thread pools. The list would be super long otherwise. In addition, +// if xpack is installed then the list will contain more thread pools and +// this way we don't have to assert about them. +// 2. Expect any number of active, queued, or rejected items. We really don't +// know how many there will be and we just want to assert that there are +// numbers in the response, not *which* numbers are there. The first column is the node name @@ -52,10 +53,16 @@ generic get listener management +ml_autodetect (default distro only) +ml_datafeed (default distro only) +ml_utility (default distro only) refresh +rollup_indexing (default distro only)` search +security-token-key (default distro only) snapshot warmer +watcher (default distro only) write -------------------------------------------------- diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc index 6522d0f5ad68a..2cd61dd905ff6 100644 --- a/docs/reference/cluster/nodes-info.asciidoc +++ b/docs/reference/cluster/nodes-info.asciidoc @@ -142,7 +142,7 @@ The result will look similar to: "host": "node-0.elastic.co", "ip": "192.168.17", "version": "{version}", - "build_flavor": "oss", + "build_flavor": "{build_flavor}", "build_type": "zip", "build_hash": "587409e", "roles": [ @@ -237,7 +237,7 @@ The result will look similar to: "host": "node-0.elastic.co", "ip": "192.168.17", "version": "{version}", - "build_flavor": "oss", + "build_flavor": "{build_flavor}", "build_type": "zip", "build_hash": "587409e", "roles": [], diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index 6efb4dced8bb8..191da2660d668 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -192,23 +192,23 @@ Will return, for example: "description": "Ingest processor that extracts information from a user agent", "classname": "org.elasticsearch.ingest.useragent.IngestUserAgentPlugin", "has_native_controller": false - } + }, + ... ], - "network_types" : { - "transport_types" : { - "netty4" : 1 - }, - "http_types" : { - "netty4" : 1 - } - } + ... 
} } -------------------------------------------------- // TESTRESPONSE[s/"plugins": \[[^\]]*\]/"plugins": $body.$_path/] +// TESTRESPONSE[s/\.\.\./"network_types": "replace_me"/] // TESTRESPONSE[s/: (\-)?[0-9]+/: $body.$_path/] // TESTRESPONSE[s/: "[^"]*"/: $body.$_path/] -//// -The TESTRESPONSE above replace all the fields values by the expected ones in the test, -because we don't really care about the field values but we want to check the fields names. -//// \ No newline at end of file +// These replacements do a few things: +// 1. Ignore the contents of the `plugins` object because we don't know all of +// the plugins that will be in it. And because we figure folks don't need to +// see an exhaustive list anyway. +// 2. The last ... contains more things that we don't think are important to +// include in the output. +// 3. All of the numbers and strings on the right hand side of *every* field in +// the response are ignored. So we're really only asserting things about the +// the shape of this response, not the values in it. diff --git a/docs/reference/setup/install/check-running.asciidoc b/docs/reference/setup/install/check-running.asciidoc index 0cfc4b329ecfa..7b95a10158d2f 100644 --- a/docs/reference/setup/install/check-running.asciidoc +++ b/docs/reference/setup/install/check-running.asciidoc @@ -19,7 +19,7 @@ which should give you a response something like this: "cluster_uuid" : "AT69_T_DTp-1qgIJlatQqA", "version" : { "number" : "{version}", - "build_flavor" : "oss", + "build_flavor" : "{build_flavor}", "build_type" : "zip", "build_hash" : "f27399d", "build_date" : "2016-03-30T09:51:41.449Z", From f3297ed23a9472e2f184b0ac4b1f367daef44cbc Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 18 Jun 2018 19:02:51 +0200 Subject: [PATCH 20/92] Packaging: Remove windows bin files from the tar distribution (#30596) This commit removes windows specific files from the tar distribution. Windows users use the zip, linux users use the tar. 
--- distribution/archives/build.gradle | 11 ---------- distribution/build.gradle | 21 +++++++++++++++++-- .../migration/migrate_7_0/packaging.asciidoc | 5 +++++ .../test/resources/packaging/utils/xpack.bash | 12 ----------- 4 files changed, 24 insertions(+), 25 deletions(-) diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index 71606c2c027a5..16b2a8a7a6ce6 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -18,7 +18,6 @@ */ import org.apache.tools.ant.taskdefs.condition.Os -import org.apache.tools.ant.filters.FixCrLfFilter import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.EmptyDirTask import org.elasticsearch.gradle.LoggedExec @@ -59,13 +58,6 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os } into('bin') { with binFiles(distributionType, oss) - with copySpec { - from('../src/bin') { - include '*.bat' - filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf')) - } - MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss)) - } } into('') { from { @@ -88,9 +80,6 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os } with noticeFile - from('../src') { - include 'bin/*.exe' - } into('modules') { with modulesFiles } diff --git a/distribution/build.gradle b/distribution/build.gradle index ff3a06b4dc577..6ffb678cb2ba8 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -21,6 +21,7 @@ import org.elasticsearch.gradle.ConcatFilesTask import org.elasticsearch.gradle.MavenFilteringHack import org.elasticsearch.gradle.NoticeTask import org.elasticsearch.gradle.test.RunTask +import org.apache.tools.ant.filters.FixCrLfFilter import java.nio.file.Path @@ -281,15 +282,28 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { binFiles = { distributionType, oss -> copySpec { + // non-windows files, for all distributions with copySpec { - // main bin files, processed with distribution specific substitutions - // everything except windows files from '../src/bin' exclude '*.exe' exclude '*.bat' eachFile { it.setMode(0755) } MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss)) } + // windows files, only for zip + if (distributionType == 'zip') { + with copySpec { + from '../src/bin' + include '*.bat' + filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf')) + MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss)) + } + with copySpec { + from '../src/bin' + include '*.exe' + } + } + // module provided bin files with copySpec { eachFile { it.setMode(0755) } if (oss) { @@ -297,6 +311,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } else { from project(':distribution').buildDefaultBin } + if (distributionType != 'zip') { + exclude '*.bat' + } } } } diff --git a/docs/reference/migration/migrate_7_0/packaging.asciidoc b/docs/reference/migration/migrate_7_0/packaging.asciidoc index 4070d6807332d..934522db7162f 100644 --- a/docs/reference/migration/migrate_7_0/packaging.asciidoc +++ b/docs/reference/migration/migrate_7_0/packaging.asciidoc @@ -8,3 +8,8 @@ The systemd service file `/usr/lib/systemd/system/elasticsearch.service` was previously marked as a configuration file in rpm and deb packages. Overrides to the systemd elasticsearch service should be made in `/etc/systemd/system/elasticsearch.service.d/override.conf`. 
+ +==== tar package no longer includes windows specific files + +The tar package previously included files in the `bin` directory meant only +for windows. These files have been removed. Use the `zip` package instead. diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash b/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash index 3e44ee9f83a58..c267744194a1c 100644 --- a/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash +++ b/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash @@ -15,30 +15,18 @@ verify_xpack_installation() { #assert_file "$ESHOME/bin" d $user $group 755 local binaryFiles=( 'elasticsearch-certgen' - 'elasticsearch-certgen.bat' 'elasticsearch-certutil' - 'elasticsearch-certutil.bat' 'elasticsearch-croneval' - 'elasticsearch-croneval.bat' 'elasticsearch-migrate' - 'elasticsearch-migrate.bat' 'elasticsearch-saml-metadata' - 'elasticsearch-saml-metadata.bat' 'elasticsearch-setup-passwords' - 'elasticsearch-setup-passwords.bat' 'elasticsearch-sql-cli' - 'elasticsearch-sql-cli.bat' "elasticsearch-sql-cli-$(cat version).jar" # This jar is executable so we pitch it in bin so folks will find it 'elasticsearch-syskeygen' - 'elasticsearch-syskeygen.bat' 'elasticsearch-users' - 'elasticsearch-users.bat' 'x-pack-env' - 'x-pack-env.bat' 'x-pack-security-env' - 'x-pack-security-env.bat' 'x-pack-watcher-env' - 'x-pack-watcher-env.bat' ) local binaryFilesCount=5 # start with oss distro number From 340313b0482dbddde07b64172d668d27277e35bb Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 18 Jun 2018 19:03:46 +0200 Subject: [PATCH 21/92] RestAPI: Reject forcemerge requests with a body (#30792) This commit adds validation to forcemerge rest requests which contain a body. All parameters to force merge must be part of http params. 
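As a usage sketch (the index name `my-index` is invented), the difference after this change is:

```
POST /my-index/_forcemerge?max_num_segments=1   <- accepted: arguments go in the query string

POST /my-index/_forcemerge
{ "max_num_segments": 1 }                       <- rejected: "forcemerge takes arguments in query parameters, not in the request body"
```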
closes #29584 --- .../admin/indices/RestForceMergeAction.java | 3 ++ .../forcemerge/RestForceMergeActionTests.java | 47 +++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java index dcc397be14263..6ec4cec77193e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java @@ -47,6 +47,9 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + if (request.hasContent()) { + throw new IllegalArgumentException("forcemerge takes arguments in query parameters, not in the request body"); + } ForceMergeRequest mergeRequest = new ForceMergeRequest(Strings.splitStringByCommaToArray(request.param("index"))); mergeRequest.indicesOptions(IndicesOptions.fromRequest(request, mergeRequest.indicesOptions())); mergeRequest.maxNumSegments(request.paramAsInt("max_num_segments", mergeRequest.maxNumSegments())); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java new file mode 100644 index 0000000000000..aeb5beb09e2fc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.forcemerge; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.action.admin.indices.RestForceMergeAction; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; + +public class RestForceMergeActionTests extends ESTestCase { + + public void testBodyRejection() throws Exception { + final RestForceMergeAction handler = new RestForceMergeAction(Settings.EMPTY, mock(RestController.class)); + String json = JsonXContent.contentBuilder().startObject().field("max_num_segments", 1).endObject().toString(); + final FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) + .withContent(new BytesArray(json), XContentType.JSON).build(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> handler.prepareRequest(request, mock(NodeClient.class))); + assertThat(e.getMessage(), equalTo("forcemerge takes arguments in query parameters, not in the request body")); + } +} From d9a6d69a0ddcb434811705067b40a0630edea3de Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Mon, 18 Jun 2018 13:50:52 -0400 Subject: [PATCH 22/92] Fix defaults in GeoShapeFieldMapper output (#31302) GeoShapeFieldMapper should show actual defaults instead of placeholder values when the mapping is requested with include_defaults=true. 
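For example (the index name is invented), requesting the mapping with defaults included:

```
GET /my-index/_mapping?include_defaults=true
```

now reports the real defaults for a quadtree-based geo_shape field, such as `"precision": "50.0m"` and `"tree_levels": 21`, rather than placeholder values.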
Closes #23206 --- .../index/mapper/GeoShapeFieldMapper.java | 17 +++- .../mapper/GeoShapeFieldMapperTests.java | 77 +++++++++++++++++++ 2 files changed, 92 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index c0158f61c3af7..7b083c2ce9e0f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -546,11 +546,24 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, if (includeDefaults || fieldType().tree().equals(Defaults.TREE) == false) { builder.field(Names.TREE, fieldType().tree()); } - if (includeDefaults || fieldType().treeLevels() != 0) { + + if (fieldType().treeLevels() != 0) { builder.field(Names.TREE_LEVELS, fieldType().treeLevels()); + } else if(includeDefaults && fieldType().precisionInMeters() == -1) { // defaults only make sense if precision is not specified + if ("geohash".equals(fieldType().tree())) { + builder.field(Names.TREE_LEVELS, Defaults.GEOHASH_LEVELS); + } else if ("legacyquadtree".equals(fieldType().tree())) { + builder.field(Names.TREE_LEVELS, Defaults.QUADTREE_LEVELS); + } else if ("quadtree".equals(fieldType().tree())) { + builder.field(Names.TREE_LEVELS, Defaults.QUADTREE_LEVELS); + } else { + throw new IllegalArgumentException("Unknown prefix tree type [" + fieldType().tree() + "]"); + } } - if (includeDefaults || fieldType().precisionInMeters() != -1) { + if (fieldType().precisionInMeters() != -1) { builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(fieldType().precisionInMeters())); + } else if (includeDefaults && fieldType().treeLevels() == 0) { // defaults only make sense if tree levels are not specified + builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(50)); } if (includeDefaults || fieldType().strategyName() != Defaults.STRATEGY) { builder.field(Names.STRATEGY, fieldType().strategyName()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index 201e749cd22e7..00b3b7c7f3e73 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -27,6 +27,8 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -34,6 +36,7 @@ import java.io.IOException; import java.util.Collection; +import java.util.Collections; import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_VALUE; import static org.hamcrest.Matchers.containsString; @@ -517,4 +520,78 @@ public void testEmptyName() throws Exception { assertThat(e.getMessage(), containsString("name cannot be empty string")); } + public void testSerializeDefaults() throws Exception { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + { + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\"")); + assertTrue(serialized, serialized.contains("\"tree_levels\":21")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "geohash") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\"")); + assertTrue(serialized, serialized.contains("\"tree_levels\":9")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("tree_levels", "6") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertFalse(serialized, serialized.contains("\"precision\":")); + assertTrue(serialized, serialized.contains("\"tree_levels\":6")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("precision", "6") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\"")); + assertFalse(serialized, serialized.contains("\"tree_levels\":")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("precision", "6m") + .field("tree_levels", "5") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\"")); + assertTrue(serialized, serialized.contains("\"tree_levels\":5")); + } + } + + public String toXContentString(GeoShapeFieldMapper mapper) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + mapper.doXContentBody(builder, true, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"))); + return Strings.toString(builder.endObject()); + } + } From 2a8381d3fa5c8d88aff662d3c1066a6e258578c2 Mon 
Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 18 Jun 2018 15:05:34 -0400 Subject: [PATCH 23/92] Avoid sending duplicate remote failed shard requests (#31313) Today if a write replication request fails, we will send a shard-failed message to the master node to fail that replica. However, if there are many ongoing write replication requests and the master node is busy, we might overwhelm the cluster and the master node with many shard-failed requests. This commit tries to minimize the shard-failed requests in the above scenario by caching the ongoing shard-failed requests. This issue was discussed at https://discuss.elastic.co/t/half-dead-node-lead-to-cluster-hang/113658/25. --- .../action/shard/ShardStateAction.java | 123 ++++++++++++++- ...rdFailedClusterStateTaskExecutorTests.java | 64 ++++---- .../action/shard/ShardStateActionTests.java | 146 +++++++++++++++++- 3 files changed, 299 insertions(+), 34 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 915e900b9ddf1..f690efa4c9a0c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -25,10 +25,10 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.MasterNodeChangePredicate; import org.elasticsearch.cluster.NotMasterException; @@ -48,6 +48,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.NodeClosedException; @@ -68,7 +69,9 @@ import java.util.HashSet; import java.util.List; import java.util.Locale; +import java.util.Objects; import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.function.Predicate; public class ShardStateAction extends AbstractComponent { @@ -80,6 +83,10 @@ public class ShardStateAction extends AbstractComponent { private final ClusterService clusterService; private final ThreadPool threadPool; + // a list of shards that failed during replication + // we keep track of these shards in order to avoid sending duplicate failed shard requests for a single failing shard. 
+ private final ConcurrentMap remoteFailedShardsCache = ConcurrentCollections.newConcurrentMap(); + @Inject public ShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService, ThreadPool threadPool) { @@ -146,8 +153,35 @@ private static boolean isMasterChannelException(TransportException exp) { */ public void remoteShardFailed(final ShardId shardId, String allocationId, long primaryTerm, boolean markAsStale, final String message, @Nullable final Exception failure, Listener listener) { assert primaryTerm > 0L : "primary term should be strictly positive"; - FailedShardEntry shardEntry = new FailedShardEntry(shardId, allocationId, primaryTerm, message, failure, markAsStale); - sendShardAction(SHARD_FAILED_ACTION_NAME, clusterService.state(), shardEntry, listener); + final FailedShardEntry shardEntry = new FailedShardEntry(shardId, allocationId, primaryTerm, message, failure, markAsStale); + final CompositeListener compositeListener = new CompositeListener(listener); + final CompositeListener existingListener = remoteFailedShardsCache.putIfAbsent(shardEntry, compositeListener); + if (existingListener == null) { + sendShardAction(SHARD_FAILED_ACTION_NAME, clusterService.state(), shardEntry, new Listener() { + @Override + public void onSuccess() { + try { + compositeListener.onSuccess(); + } finally { + remoteFailedShardsCache.remove(shardEntry); + } + } + @Override + public void onFailure(Exception e) { + try { + compositeListener.onFailure(e); + } finally { + remoteFailedShardsCache.remove(shardEntry); + } + } + }); + } else { + existingListener.addListener(listener); + } + } + + int remoteShardFailedCacheSize() { + return remoteFailedShardsCache.size(); } /** @@ -414,6 +448,23 @@ public String toString() { components.add("markAsStale [" + markAsStale + "]"); return String.join(", ", components); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FailedShardEntry that = (FailedShardEntry) o; + // Exclude message and exception from equals and hashCode + return Objects.equals(this.shardId, that.shardId) && + Objects.equals(this.allocationId, that.allocationId) && + primaryTerm == that.primaryTerm && + markAsStale == that.markAsStale; + } + + @Override + public int hashCode() { + return Objects.hash(shardId, allocationId, primaryTerm, markAsStale); + } } public void shardStarted(final ShardRouting shardRouting, final String message, Listener listener) { @@ -585,6 +636,72 @@ default void onFailure(final Exception e) { } + /** + * A composite listener that allows registering multiple listeners dynamically. 
+ */ + static final class CompositeListener implements Listener { + private boolean isNotified = false; + private Exception failure = null; + private final List listeners = new ArrayList<>(); + + CompositeListener(Listener listener) { + listeners.add(listener); + } + + void addListener(Listener listener) { + final boolean ready; + synchronized (this) { + ready = this.isNotified; + if (ready == false) { + listeners.add(listener); + } + } + if (ready) { + if (failure != null) { + listener.onFailure(failure); + } else { + listener.onSuccess(); + } + } + } + + private void onCompleted(Exception failure) { + synchronized (this) { + this.failure = failure; + this.isNotified = true; + } + RuntimeException firstException = null; + for (Listener listener : listeners) { + try { + if (failure != null) { + listener.onFailure(failure); + } else { + listener.onSuccess(); + } + } catch (RuntimeException innerEx) { + if (firstException == null) { + firstException = innerEx; + } else { + firstException.addSuppressed(innerEx); + } + } + } + if (firstException != null) { + throw firstException; + } + } + + @Override + public void onSuccess() { + onCompleted(null); + } + + @Override + public void onFailure(Exception failure) { + onCompleted(failure); + } + } + public static class NoLongerPrimaryShardException extends ElasticsearchException { public NoLongerPrimaryShardException(ShardId shardId, String msg) { diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index 9eeef54dfd796..01d0c518c1be7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java @@ -22,11 +22,11 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.Version; -import org.elasticsearch.cluster.action.shard.ShardStateAction.FailedShardEntry; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ESAllocationTestCase; +import org.elasticsearch.cluster.action.shard.ShardStateAction.FailedShardEntry; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -43,6 +43,7 @@ import org.elasticsearch.cluster.routing.allocation.StaleShard; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.Index; @@ -53,9 +54,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.Set; -import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -131,10 +130,15 @@ ClusterState applyFailedShards(ClusterState currentState, List fail tasks.addAll(failingTasks); tasks.addAll(nonExistentTasks); ClusterStateTaskExecutor.ClusterTasksResult result = failingExecutor.execute(currentState, tasks); - Map taskResultMap = - 
failingTasks.stream().collect(Collectors.toMap(Function.identity(), task -> ClusterStateTaskExecutor.TaskResult.failure(new RuntimeException("simulated applyFailedShards failure")))); - taskResultMap.putAll(nonExistentTasks.stream().collect(Collectors.toMap(Function.identity(), task -> ClusterStateTaskExecutor.TaskResult.success()))); - assertTaskResults(taskResultMap, result, currentState, false); + List> taskResultList = new ArrayList<>(); + for (FailedShardEntry failingTask : failingTasks) { + taskResultList.add(Tuple.tuple(failingTask, + ClusterStateTaskExecutor.TaskResult.failure(new RuntimeException("simulated applyFailedShards failure")))); + } + for (FailedShardEntry nonExistentTask : nonExistentTasks) { + taskResultList.add(Tuple.tuple(nonExistentTask, ClusterStateTaskExecutor.TaskResult.success())); + } + assertTaskResults(taskResultList, result, currentState, false); } public void testIllegalShardFailureRequests() throws Exception { @@ -147,14 +151,14 @@ public void testIllegalShardFailureRequests() throws Exception { tasks.add(new FailedShardEntry(failingTask.shardId, failingTask.allocationId, randomIntBetween(1, (int) primaryTerm - 1), failingTask.message, failingTask.failure, randomBoolean())); } - Map taskResultMap = - tasks.stream().collect(Collectors.toMap( - Function.identity(), - task -> ClusterStateTaskExecutor.TaskResult.failure(new ShardStateAction.NoLongerPrimaryShardException(task.shardId, - "primary term [" + task.primaryTerm + "] did not match current primary term [" + - currentState.metaData().index(task.shardId.getIndex()).primaryTerm(task.shardId.id()) + "]")))); + List> taskResultList = tasks.stream() + .map(task -> Tuple.tuple(task, ClusterStateTaskExecutor.TaskResult.failure( + new ShardStateAction.NoLongerPrimaryShardException(task.shardId, "primary term [" + + task.primaryTerm + "] did not match current primary term [" + + currentState.metaData().index(task.shardId.getIndex()).primaryTerm(task.shardId.id()) + "]")))) + .collect(Collectors.toList()); ClusterStateTaskExecutor.ClusterTasksResult result = executor.execute(currentState, tasks); - assertTaskResults(taskResultMap, result, currentState, false); + assertTaskResults(taskResultList, result, currentState, false); } public void testMarkAsStaleWhenFailingShard() throws Exception { @@ -251,44 +255,44 @@ private static void assertTasksSuccessful( ClusterState clusterState, boolean clusterStateChanged ) { - Map taskResultMap = - tasks.stream().collect(Collectors.toMap(Function.identity(), task -> ClusterStateTaskExecutor.TaskResult.success())); - assertTaskResults(taskResultMap, result, clusterState, clusterStateChanged); + List> taskResultList = tasks.stream() + .map(t -> Tuple.tuple(t, ClusterStateTaskExecutor.TaskResult.success())).collect(Collectors.toList()); + assertTaskResults(taskResultList, result, clusterState, clusterStateChanged); } private static void assertTaskResults( - Map taskResultMap, + List> taskResultList, ClusterStateTaskExecutor.ClusterTasksResult result, ClusterState clusterState, boolean clusterStateChanged ) { // there should be as many task results as tasks - assertEquals(taskResultMap.size(), result.executionResults.size()); + assertEquals(taskResultList.size(), result.executionResults.size()); - for (Map.Entry entry : taskResultMap.entrySet()) { + for (Tuple entry : taskResultList) { // every task should have a corresponding task result - assertTrue(result.executionResults.containsKey(entry.getKey())); + assertTrue(result.executionResults.containsKey(entry.v1())); // the task 
results are as expected - assertEquals(entry.getKey().toString(), entry.getValue().isSuccess(), result.executionResults.get(entry.getKey()).isSuccess()); + assertEquals(entry.v1().toString(), entry.v2().isSuccess(), result.executionResults.get(entry.v1()).isSuccess()); } List shards = clusterState.getRoutingTable().allShards(); - for (Map.Entry entry : taskResultMap.entrySet()) { - if (entry.getValue().isSuccess()) { + for (Tuple entry : taskResultList) { + if (entry.v2().isSuccess()) { // the shard was successfully failed and so should not be in the routing table for (ShardRouting shard : shards) { if (shard.assignedToNode()) { - assertFalse("entry key " + entry.getKey() + ", shard routing " + shard, - entry.getKey().getShardId().equals(shard.shardId()) && - entry.getKey().getAllocationId().equals(shard.allocationId().getId())); + assertFalse("entry key " + entry.v1() + ", shard routing " + shard, + entry.v1().getShardId().equals(shard.shardId()) && + entry.v1().getAllocationId().equals(shard.allocationId().getId())); } } } else { // check we saw the expected failure - ClusterStateTaskExecutor.TaskResult actualResult = result.executionResults.get(entry.getKey()); - assertThat(actualResult.getFailure(), instanceOf(entry.getValue().getFailure().getClass())); - assertThat(actualResult.getFailure().getMessage(), equalTo(entry.getValue().getFailure().getMessage())); + ClusterStateTaskExecutor.TaskResult actualResult = result.executionResults.get(entry.v1()); + assertThat(actualResult.getFailure(), instanceOf(entry.v2().getFailure().getClass())); + assertThat(actualResult.getFailure().getMessage(), equalTo(entry.v2().getFailure().getMessage())); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index bbd326ff2fedb..1d78cdeb98374 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -59,9 +59,10 @@ import org.junit.BeforeClass; import java.io.IOException; -import java.util.UUID; import java.util.Collections; +import java.util.UUID; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Phaser; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -73,6 +74,8 @@ import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.sameInstance; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -138,6 +141,7 @@ public void tearDown() throws Exception { clusterService.close(); transportService.close(); super.tearDown(); + assertThat(shardStateAction.remoteShardFailedCacheSize(), equalTo(0)); } @AfterClass @@ -381,6 +385,89 @@ public void onFailure(Exception e) { assertThat(failure.get().getMessage(), equalTo(catastrophicError.getMessage())); } + public void testCacheRemoteShardFailed() throws Exception { + final String index = "test"; + setState(clusterService, ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); + ShardRouting failedShard = getRandomShardRouting(index); + boolean markAsStale = 
randomBoolean(); + int numListeners = between(1, 100); + CountDownLatch latch = new CountDownLatch(numListeners); + long primaryTerm = randomLongBetween(1, Long.MAX_VALUE); + for (int i = 0; i < numListeners; i++) { + shardStateAction.remoteShardFailed(failedShard.shardId(), failedShard.allocationId().getId(), + primaryTerm, markAsStale, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + latch.countDown(); + } + @Override + public void onFailure(Exception e) { + latch.countDown(); + } + }); + } + CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + assertThat(capturedRequests, arrayWithSize(1)); + transport.handleResponse(capturedRequests[0].requestId, TransportResponse.Empty.INSTANCE); + latch.await(); + assertThat(transport.capturedRequests(), arrayWithSize(0)); + } + + public void testRemoteShardFailedConcurrently() throws Exception { + final String index = "test"; + final AtomicBoolean shutdown = new AtomicBoolean(false); + setState(clusterService, ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); + ShardRouting[] failedShards = new ShardRouting[between(1, 5)]; + for (int i = 0; i < failedShards.length; i++) { + failedShards[i] = getRandomShardRouting(index); + } + Thread[] clientThreads = new Thread[between(1, 6)]; + int iterationsPerThread = scaledRandomIntBetween(50, 500); + Phaser barrier = new Phaser(clientThreads.length + 2); // one for master thread, one for the main thread + Thread masterThread = new Thread(() -> { + barrier.arriveAndAwaitAdvance(); + while (shutdown.get() == false) { + for (CapturingTransport.CapturedRequest request : transport.getCapturedRequestsAndClear()) { + if (randomBoolean()) { + transport.handleResponse(request.requestId, TransportResponse.Empty.INSTANCE); + } else { + transport.handleRemoteError(request.requestId, randomFrom(getSimulatedFailure())); + } + } + } + }); + masterThread.start(); + + AtomicInteger notifiedResponses = new AtomicInteger(); + for (int t = 0; t < clientThreads.length; t++) { + clientThreads[t] = new Thread(() -> { + barrier.arriveAndAwaitAdvance(); + for (int i = 0; i < iterationsPerThread; i++) { + ShardRouting failedShard = randomFrom(failedShards); + shardStateAction.remoteShardFailed(failedShard.shardId(), failedShard.allocationId().getId(), + randomLongBetween(1, Long.MAX_VALUE), randomBoolean(), "test", getSimulatedFailure(), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + notifiedResponses.incrementAndGet(); + } + @Override + public void onFailure(Exception e) { + notifiedResponses.incrementAndGet(); + } + }); + } + }); + clientThreads[t].start(); + } + barrier.arriveAndAwaitAdvance(); + for (Thread t : clientThreads) { + t.join(); + } + assertBusy(() -> assertThat(notifiedResponses.get(), equalTo(clientThreads.length * iterationsPerThread))); + shutdown.set(true); + masterThread.join(); + } + private ShardRouting getRandomShardRouting(String index) { IndexRoutingTable indexRoutingTable = clusterService.state().routingTable().index(index); ShardsIterator shardsIterator = indexRoutingTable.randomAllActiveShardsIt(); @@ -452,4 +539,61 @@ BytesReference serialize(Writeable writeable, Version version) throws IOExceptio return out.bytes(); } } + + public void testCompositeListener() throws Exception { + AtomicInteger successCount = new AtomicInteger(); + AtomicInteger failureCount = new AtomicInteger(); + Exception failure = randomBoolean() ? 
getSimulatedFailure() : null; + ShardStateAction.CompositeListener compositeListener = new ShardStateAction.CompositeListener(new ShardStateAction.Listener() { + @Override + public void onSuccess() { + successCount.incrementAndGet(); + } + @Override + public void onFailure(Exception e) { + assertThat(e, sameInstance(failure)); + failureCount.incrementAndGet(); + } + }); + int iterationsPerThread = scaledRandomIntBetween(100, 1000); + Thread[] threads = new Thread[between(1, 4)]; + Phaser barrier = new Phaser(threads.length + 1); + for (int i = 0; i < threads.length; i++) { + threads[i] = new Thread(() -> { + barrier.arriveAndAwaitAdvance(); + for (int n = 0; n < iterationsPerThread; n++) { + compositeListener.addListener(new ShardStateAction.Listener() { + @Override + public void onSuccess() { + successCount.incrementAndGet(); + } + @Override + public void onFailure(Exception e) { + assertThat(e, sameInstance(failure)); + failureCount.incrementAndGet(); + } + }); + } + }); + threads[i].start(); + } + barrier.arriveAndAwaitAdvance(); + if (failure != null) { + compositeListener.onFailure(failure); + } else { + compositeListener.onSuccess(); + } + for (Thread t : threads) { + t.join(); + } + assertBusy(() -> { + if (failure != null) { + assertThat(successCount.get(), equalTo(0)); + assertThat(failureCount.get(), equalTo(threads.length*iterationsPerThread + 1)); + } else { + assertThat(successCount.get(), equalTo(threads.length*iterationsPerThread + 1)); + assertThat(failureCount.get(), equalTo(0)); + } + }); + } } From 4ad334f8e0af43f6aa38bd8dbe7bc7227cd9eb11 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 18 Jun 2018 15:04:26 -0600 Subject: [PATCH 24/92] Fix reference to XContentBuilder.string() (#31337) In 6.3 this was moved to `Strings.toString(XContentBuilder)` as part of the XContent extraction. This commit fixes the docs to reference the new method. Resolves #31326 --- docs/java-api/docs/index_.asciidoc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/java-api/docs/index_.asciidoc b/docs/java-api/docs/index_.asciidoc index b455a7ab01ff3..2ce19cfffa098 100644 --- a/docs/java-api/docs/index_.asciidoc +++ b/docs/java-api/docs/index_.asciidoc @@ -99,11 +99,13 @@ Note that you can also add arrays with `startArray(String)` and other XContentBuilder objects. If you need to see the generated JSON content, you can use the -`string()` method. +`Strings.toString()` method. [source,java] -------------------------------------------------- -String json = builder.string(); +import org.elasticsearch.common.Strings; + +String json = Strings.toString(builder); -------------------------------------------------- From e67aa96c81f309ebedc1d529e255b48069b262d1 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 18 Jun 2018 23:53:04 +0200 Subject: [PATCH 25/92] Core: Combine Action and GenericAction (#31405) Since #30966, Action no longer has anything but a call to the GenericAction super constructor. This commit renames GenericAction into Action, thus eliminating the Action class. Additionally, this commit removes the Request generic parameter of the class, since it was unused. 
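Not part of the patch itself, but a minimal sketch of the shape the rename leaves behind, assuming hypothetical `MyAction` and `MyResponse` types (none of the names below come from this change): a concrete action now extends `Action` parameterized only by its response type, keeps the usual singleton `INSTANCE` and `NAME` constants, and implements `newResponse()`.

[source,java]
--------------------------------------------------
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionResponse;

// Hypothetical response type; a real action would define its own ActionResponse subclass.
class MyResponse extends ActionResponse {
}

// Once GenericAction is folded into Action, only the response type parameter remains;
// the request (and request builder) type arguments are gone.
public class MyAction extends Action<MyResponse> {

    public static final MyAction INSTANCE = new MyAction();
    public static final String NAME = "cluster:admin/my_plugin/my_action";

    private MyAction() {
        super(NAME); // the action name must be unique across actions
    }

    @Override
    public MyResponse newResponse() {
        return new MyResponse();
    }
}
--------------------------------------------------

The mechanical edits below apply that same shape to every existing action class: drop the request and request builder type arguments and extend the renamed `Action` directly.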
--- .../noop/action/bulk/NoopBulkAction.java | 3 +- .../noop/action/search/NoopSearchAction.java | 3 +- .../ingest/common/GrokProcessorGetAction.java | 2 +- .../mustache/MultiSearchTemplateAction.java | 2 +- .../script/mustache/SearchTemplateAction.java | 2 +- .../painless/PainlessExecuteAction.java | 2 +- .../index/rankeval/RankEvalAction.java | 2 +- .../rankeval/RankEvalRequestBuilder.java | 2 +- .../AbstractBaseReindexRestHandler.java | 4 +- .../AbstractBulkByQueryRestHandler.java | 4 +- .../BulkByScrollParallelizationHelper.java | 6 +- .../index/reindex/RethrottleAction.java | 2 +- .../reindex/RethrottleRequestBuilder.java | 2 +- .../reindex/AsyncBulkByScrollActionTests.java | 2 +- .../java/org/elasticsearch/action/Action.java | 44 ++++++++++-- .../elasticsearch/action/ActionModule.java | 10 +-- .../action/ActionRequestBuilder.java | 4 +- .../elasticsearch/action/GenericAction.java | 67 ------------------- .../action/TransportActionNodeProxy.java | 4 +- .../ClusterAllocationExplainAction.java | 2 +- .../cluster/health/ClusterHealthAction.java | 2 +- .../hotthreads/NodesHotThreadsAction.java | 2 +- .../cluster/node/info/NodesInfoAction.java | 2 +- .../NodesReloadSecureSettingsAction.java | 2 +- .../cluster/node/stats/NodesStatsAction.java | 2 +- .../node/tasks/cancel/CancelTasksAction.java | 2 +- .../cluster/node/tasks/get/GetTaskAction.java | 2 +- .../node/tasks/list/ListTasksAction.java | 2 +- .../cluster/node/usage/NodesUsageAction.java | 2 +- .../node/usage/NodesUsageRequestBuilder.java | 2 +- .../cluster/remote/RemoteInfoAction.java | 2 +- .../delete/DeleteRepositoryAction.java | 2 +- .../get/GetRepositoriesAction.java | 2 +- .../repositories/put/PutRepositoryAction.java | 2 +- .../verify/VerifyRepositoryAction.java | 2 +- .../cluster/reroute/ClusterRerouteAction.java | 2 +- .../settings/ClusterUpdateSettingsAction.java | 2 +- .../shards/ClusterSearchShardsAction.java | 2 +- .../create/CreateSnapshotAction.java | 2 +- .../delete/DeleteSnapshotAction.java | 2 +- .../snapshots/get/GetSnapshotsAction.java | 2 +- .../restore/RestoreSnapshotAction.java | 2 +- .../status/SnapshotsStatusAction.java | 2 +- .../cluster/state/ClusterStateAction.java | 2 +- .../cluster/stats/ClusterStatsAction.java | 2 +- .../DeleteStoredScriptAction.java | 2 +- .../storedscripts/GetStoredScriptAction.java | 2 +- .../storedscripts/PutStoredScriptAction.java | 2 +- .../tasks/PendingClusterTasksAction.java | 2 +- .../indices/alias/IndicesAliasesAction.java | 2 +- .../alias/exists/AliasesExistAction.java | 3 +- .../alias/get/BaseAliasesRequestBuilder.java | 2 +- .../indices/alias/get/GetAliasesAction.java | 2 +- .../admin/indices/analyze/AnalyzeAction.java | 2 +- .../cache/clear/ClearIndicesCacheAction.java | 2 +- .../admin/indices/close/CloseIndexAction.java | 2 +- .../indices/create/CreateIndexAction.java | 2 +- .../indices/delete/DeleteIndexAction.java | 2 +- .../exists/indices/IndicesExistsAction.java | 2 +- .../exists/types/TypesExistsAction.java | 2 +- .../admin/indices/flush/FlushAction.java | 2 +- .../indices/flush/SyncedFlushAction.java | 2 +- .../indices/forcemerge/ForceMergeAction.java | 2 +- .../admin/indices/get/GetIndexAction.java | 2 +- .../mapping/get/GetFieldMappingsAction.java | 2 +- .../mapping/get/GetMappingsAction.java | 2 +- .../indices/mapping/put/PutMappingAction.java | 2 +- .../admin/indices/open/OpenIndexAction.java | 2 +- .../indices/recovery/RecoveryAction.java | 2 +- .../admin/indices/refresh/RefreshAction.java | 2 +- .../indices/rollover/RolloverAction.java | 2 +- 
.../segments/IndicesSegmentsAction.java | 2 +- .../settings/get/GetSettingsAction.java | 2 +- .../settings/put/UpdateSettingsAction.java | 2 +- .../IndicesShardStoreRequestBuilder.java | 2 +- .../shards/IndicesShardStoresAction.java | 2 +- .../admin/indices/shrink/ResizeAction.java | 2 +- .../indices/shrink/ResizeRequestBuilder.java | 2 +- .../admin/indices/shrink/ShrinkAction.java | 2 +- .../indices/stats/IndicesStatsAction.java | 2 +- .../delete/DeleteIndexTemplateAction.java | 2 +- .../template/get/GetIndexTemplatesAction.java | 2 +- .../template/put/PutIndexTemplateAction.java | 2 +- .../upgrade/get/UpgradeStatusAction.java | 2 +- .../indices/upgrade/post/UpgradeAction.java | 2 +- .../upgrade/post/UpgradeSettingsAction.java | 2 +- .../validate/query/ValidateQueryAction.java | 2 +- .../elasticsearch/action/bulk/BulkAction.java | 2 +- .../action/delete/DeleteAction.java | 2 +- .../action/explain/ExplainAction.java | 2 +- .../fieldcaps/FieldCapabilitiesAction.java | 2 +- .../elasticsearch/action/get/GetAction.java | 2 +- .../action/get/MultiGetAction.java | 2 +- .../action/index/IndexAction.java | 2 +- .../action/ingest/DeletePipelineAction.java | 2 +- .../action/ingest/GetPipelineAction.java | 2 +- .../action/ingest/IngestActionForwarder.java | 2 +- .../action/ingest/PutPipelineAction.java | 2 +- .../action/ingest/SimulatePipelineAction.java | 2 +- .../elasticsearch/action/main/MainAction.java | 2 +- .../action/search/ClearScrollAction.java | 2 +- .../action/search/MultiSearchAction.java | 2 +- .../action/search/SearchAction.java | 2 +- .../action/search/SearchScrollAction.java | 2 +- .../BroadcastOperationRequestBuilder.java | 2 +- .../master/AcknowledgedRequestBuilder.java | 2 +- .../MasterNodeOperationRequestBuilder.java | 2 +- ...MasterNodeReadOperationRequestBuilder.java | 2 +- .../info/ClusterInfoRequestBuilder.java | 2 +- .../nodes/NodesOperationRequestBuilder.java | 2 +- .../ReplicationRequestBuilder.java | 2 +- .../InstanceShardOperationRequestBuilder.java | 2 +- .../SingleShardOperationRequestBuilder.java | 2 +- .../support/tasks/TasksRequestBuilder.java | 2 +- .../termvectors/MultiTermVectorsAction.java | 2 +- .../action/termvectors/TermVectorsAction.java | 2 +- .../action/update/UpdateAction.java | 2 +- .../client/ElasticsearchClient.java | 4 +- .../elasticsearch/client/FilterClient.java | 2 +- .../client/ParentTaskAssigningClient.java | 2 +- .../elasticsearch/client/node/NodeClient.java | 17 +++-- .../client/support/AbstractClient.java | 16 ++--- .../client/transport/TransportClient.java | 7 +- .../transport/TransportProxyClient.java | 11 ++- .../AbstractBulkByScrollRequestBuilder.java | 2 +- ...stractBulkIndexByScrollRequestBuilder.java | 2 +- .../index/reindex/DeleteByQueryAction.java | 2 +- .../reindex/DeleteByQueryRequestBuilder.java | 4 +- .../index/reindex/ReindexAction.java | 2 +- .../index/reindex/ReindexRequestBuilder.java | 4 +- .../index/reindex/UpdateByQueryAction.java | 2 +- .../reindex/UpdateByQueryRequestBuilder.java | 4 +- .../java/org/elasticsearch/node/Node.java | 4 +- .../CompletionPersistentTaskAction.java | 2 +- .../persistent/PersistentTasksService.java | 2 +- .../RemovePersistentTaskAction.java | 2 +- .../persistent/StartPersistentTaskAction.java | 2 +- .../UpdatePersistentTaskStatusAction.java | 2 +- .../elasticsearch/plugins/ActionPlugin.java | 14 ++-- .../transport/RemoteClusterAwareClient.java | 2 +- .../action/ActionModuleTests.java | 2 +- ...nericActionTests.java => ActionTests.java} | 4 +- .../cluster/node/tasks/TestTaskPlugin.java | 9 ++- 
.../client/AbstractClientHeadersTestCase.java | 6 +- .../ParentTaskAssigningClientTests.java | 4 +- .../client/node/NodeClientHeadersTests.java | 10 +-- .../TransportClientHeadersTests.java | 4 +- .../indices/settings/UpdateSettingsIT.java | 2 +- .../persistent/TestPersistentTasksPlugin.java | 2 +- .../elasticsearch/test/client/NoOpClient.java | 2 +- .../license/DeleteLicenseAction.java | 2 +- .../license/GetBasicStatusAction.java | 2 +- .../license/GetLicenseAction.java | 2 +- .../license/GetTrialStatusAction.java | 2 +- .../license/PostStartBasicAction.java | 2 +- .../license/PostStartTrialAction.java | 2 +- .../license/PutLicenseAction.java | 2 +- .../xpack/core/ClientHelper.java | 10 +-- .../xpack/core/XPackClientPlugin.java | 4 +- .../elasticsearch/xpack/core/XPackPlugin.java | 6 +- .../xpack/core/action/XPackInfoAction.java | 2 +- .../xpack/core/action/XPackUsageAction.java | 2 +- .../deprecation/DeprecationInfoAction.java | 2 +- .../core/graph/action/GraphExploreAction.java | 2 +- .../xpack/core/ml/action/CloseJobAction.java | 2 +- .../core/ml/action/DeleteCalendarAction.java | 2 +- .../ml/action/DeleteCalendarEventAction.java | 2 +- .../core/ml/action/DeleteDatafeedAction.java | 2 +- .../ml/action/DeleteExpiredDataAction.java | 2 +- .../core/ml/action/DeleteFilterAction.java | 2 +- .../xpack/core/ml/action/DeleteJobAction.java | 2 +- .../ml/action/DeleteModelSnapshotAction.java | 2 +- .../ml/action/FinalizeJobExecutionAction.java | 2 +- .../xpack/core/ml/action/FlushJobAction.java | 2 +- .../core/ml/action/ForecastJobAction.java | 2 +- .../core/ml/action/GetBucketsAction.java | 2 +- .../ml/action/GetCalendarEventsAction.java | 2 +- .../core/ml/action/GetCalendarsAction.java | 2 +- .../core/ml/action/GetCategoriesAction.java | 2 +- .../core/ml/action/GetDatafeedsAction.java | 2 +- .../ml/action/GetDatafeedsStatsAction.java | 2 +- .../core/ml/action/GetFiltersAction.java | 2 +- .../core/ml/action/GetInfluencersAction.java | 2 +- .../xpack/core/ml/action/GetJobsAction.java | 2 +- .../core/ml/action/GetJobsStatsAction.java | 2 +- .../ml/action/GetModelSnapshotsAction.java | 2 +- .../ml/action/GetOverallBucketsAction.java | 2 +- .../core/ml/action/GetRecordsAction.java | 2 +- .../core/ml/action/IsolateDatafeedAction.java | 2 +- .../core/ml/action/KillProcessAction.java | 2 +- .../xpack/core/ml/action/MlInfoAction.java | 2 +- .../xpack/core/ml/action/OpenJobAction.java | 2 +- .../core/ml/action/PersistJobAction.java | 2 +- .../ml/action/PostCalendarEventsAction.java | 2 +- .../xpack/core/ml/action/PostDataAction.java | 2 +- .../core/ml/action/PreviewDatafeedAction.java | 2 +- .../core/ml/action/PutCalendarAction.java | 2 +- .../core/ml/action/PutDatafeedAction.java | 2 +- .../xpack/core/ml/action/PutFilterAction.java | 2 +- .../xpack/core/ml/action/PutJobAction.java | 2 +- .../ml/action/RevertModelSnapshotAction.java | 3 +- .../core/ml/action/StartDatafeedAction.java | 2 +- .../core/ml/action/StopDatafeedAction.java | 2 +- .../ml/action/UpdateCalendarJobAction.java | 2 +- .../core/ml/action/UpdateDatafeedAction.java | 2 +- .../xpack/core/ml/action/UpdateJobAction.java | 2 +- .../ml/action/UpdateModelSnapshotAction.java | 2 +- .../core/ml/action/UpdateProcessAction.java | 2 +- .../ml/action/ValidateDetectorAction.java | 2 +- .../ml/action/ValidateJobConfigAction.java | 2 +- .../action/MonitoringBulkAction.java | 2 +- .../rollup/action/DeleteRollupJobAction.java | 2 +- .../rollup/action/GetRollupCapsAction.java | 2 +- .../rollup/action/GetRollupJobsAction.java | 2 +- 
.../rollup/action/PutRollupJobAction.java | 2 +- .../rollup/action/RollupSearchAction.java | 2 +- .../rollup/action/StartRollupJobAction.java | 2 +- .../rollup/action/StopRollupJobAction.java | 2 +- .../action/realm/ClearRealmCacheAction.java | 2 +- .../action/role/ClearRolesCacheAction.java | 2 +- .../action/role/DeleteRoleAction.java | 2 +- .../security/action/role/GetRolesAction.java | 2 +- .../security/action/role/PutRoleAction.java | 2 +- .../rolemapping/DeleteRoleMappingAction.java | 2 +- .../rolemapping/GetRoleMappingsAction.java | 4 +- .../rolemapping/PutRoleMappingAction.java | 2 +- .../action/saml/SamlAuthenticateAction.java | 2 +- .../saml/SamlInvalidateSessionAction.java | 2 +- .../action/saml/SamlLogoutAction.java | 2 +- .../saml/SamlPrepareAuthenticationAction.java | 2 +- .../action/token/CreateTokenAction.java | 2 +- .../token/CreateTokenRequestBuilder.java | 2 +- .../action/token/InvalidateTokenAction.java | 2 +- .../action/token/RefreshTokenAction.java | 2 +- .../action/user/AuthenticateAction.java | 2 +- .../action/user/ChangePasswordAction.java | 2 +- .../action/user/DeleteUserAction.java | 2 +- .../security/action/user/GetUsersAction.java | 2 +- .../action/user/HasPrivilegesAction.java | 2 +- .../security/action/user/PutUserAction.java | 2 +- .../action/user/SetEnabledAction.java | 2 +- .../ssl/action/GetCertificateInfoAction.java | 2 +- .../upgrade/actions/IndexUpgradeAction.java | 2 +- .../actions/IndexUpgradeInfoAction.java | 2 +- .../transport/actions/ack/AckWatchAction.java | 2 +- .../actions/activate/ActivateWatchAction.java | 2 +- .../actions/delete/DeleteWatchAction.java | 2 +- .../actions/execute/ExecuteWatchAction.java | 2 +- .../transport/actions/get/GetWatchAction.java | 2 +- .../transport/actions/put/PutWatchAction.java | 2 +- .../actions/service/WatcherServiceAction.java | 2 +- .../actions/stats/WatcherStatsAction.java | 2 +- .../action/oauth2/RestGetTokenAction.java | 2 +- ...sportSamlInvalidateSessionActionTests.java | 2 +- .../index/IndexAuditTrailMutedTests.java | 2 +- .../authc/esnative/NativeUsersStoreTests.java | 2 +- .../support/SecurityIndexManagerTests.java | 4 +- .../sql/plugin/SqlClearCursorAction.java | 2 +- .../xpack/sql/plugin/SqlQueryAction.java | 2 +- .../xpack/sql/plugin/SqlTranslateAction.java | 2 +- 260 files changed, 359 insertions(+), 400 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/action/GenericAction.java rename server/src/test/java/org/elasticsearch/action/{GenericActionTests.java => ActionTests.java} (93%) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java index 73678b2f5ea65..2bfd3b0cc8ed4 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java @@ -19,10 +19,9 @@ package org.elasticsearch.plugin.noop.action.bulk; import org.elasticsearch.action.Action; -import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; -public class NoopBulkAction extends Action { +public class NoopBulkAction extends Action { public static final String NAME = "mock:data/write/bulk"; public static final NoopBulkAction INSTANCE = new NoopBulkAction(); diff --git 
a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java index ca2c3d9adfc41..e7e515594a55d 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java @@ -19,10 +19,9 @@ package org.elasticsearch.plugin.noop.action.search; import org.elasticsearch.action.Action; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -public class NoopSearchAction extends Action { +public class NoopSearchAction extends Action { public static final NoopSearchAction INSTANCE = new NoopSearchAction(); public static final String NAME = "mock:data/read/search"; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 5204a07b1c969..4b74bb800458d 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -52,7 +52,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestStatus.OK; -public class GrokProcessorGetAction extends Action { +public class GrokProcessorGetAction extends Action { public static final GrokProcessorGetAction INSTANCE = new GrokProcessorGetAction(); public static final String NAME = "cluster:admin/ingest/processor/grok/get"; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java index 21b9a11e1f214..372b328bbfc1a 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class MultiSearchTemplateAction extends Action { +public class MultiSearchTemplateAction extends Action { public static final MultiSearchTemplateAction INSTANCE = new MultiSearchTemplateAction(); public static final String NAME = "indices:data/read/msearch/template"; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java index 1246e8e8e9bf2..a08329f48dcbb 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class SearchTemplateAction extends Action { +public class SearchTemplateAction extends Action { public static final SearchTemplateAction INSTANCE = new SearchTemplateAction(); public static final String NAME = "indices:data/read/search/template"; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index f91d349f80657..5430303feb262 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -62,7 +62,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestStatus.OK; -public class PainlessExecuteAction extends Action { +public class PainlessExecuteAction extends Action { static final PainlessExecuteAction INSTANCE = new PainlessExecuteAction(); private static final String NAME = "cluster:admin/scripts/painless/execute"; diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java index 441cbb5fac108..54e89fe0e98b8 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java @@ -24,7 +24,7 @@ /** * Action for explaining evaluating search ranking results. */ -public class RankEvalAction extends Action { +public class RankEvalAction extends Action { public static final RankEvalAction INSTANCE = new RankEvalAction(); public static final String NAME = "indices:data/read/rank_eval"; diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java index 146c987eff0ac..4108a817f046e 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java @@ -25,7 +25,7 @@ public class RankEvalRequestBuilder extends ActionRequestBuilder { - public RankEvalRequestBuilder(ElasticsearchClient client, Action action, + public RankEvalRequestBuilder(ElasticsearchClient client, Action action, RankEvalRequest request) { super(client, action, request); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java index de0c39b8f65ba..bf333352dd55c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; @@ -38,7 +38,7 @@ public abstract class AbstractBaseReindexRestHandler< Request extends AbstractBulkByScrollRequest, - A extends GenericAction + A extends Action > extends BaseRestHandler { private final A action; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java index 230828ed3fca9..fab94494fe13d 100644 --- 
a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -38,7 +38,7 @@ */ public abstract class AbstractBulkByQueryRestHandler< Request extends AbstractBulkByScrollRequest, - A extends GenericAction> extends AbstractBaseReindexRestHandler { + A extends Action> extends AbstractBaseReindexRestHandler { protected AbstractBulkByQueryRestHandler(Settings settings, A action) { super(settings, action); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java index 3cc0901c81e21..799d5874e15ba 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java @@ -61,7 +61,7 @@ private BulkByScrollParallelizationHelper() {} static > void startSlicedAction( Request request, BulkByScrollTask task, - Action action, + Action action, ActionListener listener, Client client, DiscoveryNode node, @@ -85,7 +85,7 @@ static > void startSlicedAc private static > void sliceConditionally( Request request, BulkByScrollTask task, - Action action, + Action action, ActionListener listener, Client client, DiscoveryNode node, @@ -118,7 +118,7 @@ private static int countSlicesBasedOnShards(ClusterSearchShardsResponse response private static > void sendSubRequests( Client client, - Action action, + Action action, String localNodeId, BulkByScrollTask task, Request request, diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java index ff0803c77425f..27c1b851439ae 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -public class RethrottleAction extends Action { +public class RethrottleAction extends Action { public static final RethrottleAction INSTANCE = new RethrottleAction(); public static final String NAME = "cluster:admin/reindex/rethrottle"; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java index b70389b5c9f3e..25407e6dc93d5 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java @@ -29,7 +29,7 @@ */ public class RethrottleRequestBuilder extends TasksRequestBuilder { public RethrottleRequestBuilder(ElasticsearchClient client, - Action action) { + Action action) { super(client, action, new RethrottleRequest()); } diff --git 
a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 727710e8b6bdd..9d22b90ee7f5b 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -745,7 +745,7 @@ private class MyMockClient extends FilterClient { @SuppressWarnings("unchecked") protected > void doExecute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { if (false == expectedHeaders.equals(threadPool().getThreadContext().getHeaders())) { listener.onFailure( new RuntimeException("Expected " + expectedHeaders + " but got " + threadPool().getThreadContext().getHeaders())); diff --git a/server/src/main/java/org/elasticsearch/action/Action.java b/server/src/main/java/org/elasticsearch/action/Action.java index 8d419f379d3b0..2fc49d69ed1cc 100644 --- a/server/src/main/java/org/elasticsearch/action/Action.java +++ b/server/src/main/java/org/elasticsearch/action/Action.java @@ -19,13 +19,49 @@ package org.elasticsearch.action; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.transport.TransportRequestOptions; + /** - * Base action. Supports building the Request through a RequestBuilder. + * A generic action. Should strive to make it a singleton. */ -public abstract class Action - extends GenericAction { +public abstract class Action { + + private final String name; + /** + * @param name The name of the action, must be unique across actions. + */ protected Action(String name) { - super(name); + this.name = name; + } + + /** + * The name of the action. Must be unique across actions. + */ + public String name() { + return this.name; + } + + /** + * Creates a new response instance. + */ + public abstract Response newResponse(); + + /** + * Optional request options for the action. + */ + public TransportRequestOptions transportOptions(Settings settings) { + return TransportRequestOptions.EMPTY; + } + + @Override + public boolean equals(Object o) { + return o instanceof Action && name.equals(((Action) o).name()); + } + + @Override + public int hashCode() { + return name.hashCode(); } } diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 7ddb39b6d6225..324e75d64d80f 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -406,8 +406,8 @@ public void register(ActionHandler handler) { } public void register( - GenericAction action, Class> transportAction, - Class... supportTransportActions) { + Action action, Class> transportAction, + Class... 
supportTransportActions) { register(new ActionHandler<>(action, transportAction, supportTransportActions)); } } @@ -675,10 +675,10 @@ protected void configure() { bind(AutoCreateIndex.class).toInstance(autoCreateIndex); bind(TransportLivenessAction.class).asEagerSingleton(); - // register GenericAction -> transportAction Map used by NodeClient + // register Action -> transportAction Map used by NodeClient @SuppressWarnings("rawtypes") - MapBinder transportActionsBinder - = MapBinder.newMapBinder(binder(), GenericAction.class, TransportAction.class); + MapBinder transportActionsBinder + = MapBinder.newMapBinder(binder(), Action.class, TransportAction.class); for (ActionHandler action : actions.values()) { // bind the action as eager singleton, so the map binder one will reuse it bind(action.getTransportAction()).asEagerSingleton(); diff --git a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java index 208c609333c8c..a3aa8ac2a5222 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java @@ -26,11 +26,11 @@ public abstract class ActionRequestBuilder { - protected final Action action; + protected final Action action; protected final Request request; protected final ElasticsearchClient client; - protected ActionRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected ActionRequestBuilder(ElasticsearchClient client, Action action, Request request) { Objects.requireNonNull(action, "action must not be null"); this.action = action; this.request = request; diff --git a/server/src/main/java/org/elasticsearch/action/GenericAction.java b/server/src/main/java/org/elasticsearch/action/GenericAction.java deleted file mode 100644 index 6220a1b2062bf..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/GenericAction.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.transport.TransportRequestOptions; - -/** - * A generic action. Should strive to make it a singleton. - */ -public abstract class GenericAction { - - private final String name; - - /** - * @param name The name of the action, must be unique across actions. - */ - protected GenericAction(String name) { - this.name = name; - } - - /** - * The name of the action. Must be unique across actions. - */ - public String name() { - return this.name; - } - - /** - * Creates a new response instance. - */ - public abstract Response newResponse(); - - /** - * Optional request options for the action. 
- */ - public TransportRequestOptions transportOptions(Settings settings) { - return TransportRequestOptions.EMPTY; - } - - @Override - public boolean equals(Object o) { - return o instanceof GenericAction && name.equals(((GenericAction) o).name()); - } - - @Override - public int hashCode() { - return name.hashCode(); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java b/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java index 2e7cbec93d9ae..c369deb0b10b3 100644 --- a/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java +++ b/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java @@ -31,10 +31,10 @@ public class TransportActionNodeProxy extends AbstractComponent { private final TransportService transportService; - private final GenericAction action; + private final Action action; private final TransportRequestOptions transportOptions; - public TransportActionNodeProxy(Settings settings, GenericAction action, TransportService transportService) { + public TransportActionNodeProxy(Settings settings, Action action, TransportService transportService) { super(settings); this.action = action; this.transportService = transportService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java index 04fb3e915d20f..19d5378b305ed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java @@ -24,7 +24,7 @@ /** * Action for explaining shard allocation for a shard in the cluster */ -public class ClusterAllocationExplainAction extends Action { +public class ClusterAllocationExplainAction extends Action { public static final ClusterAllocationExplainAction INSTANCE = new ClusterAllocationExplainAction(); public static final String NAME = "cluster:monitor/allocation/explain"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java index 31781ca13fe86..0cd148ee231e0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClusterHealthAction extends Action { +public class ClusterHealthAction extends Action { public static final ClusterHealthAction INSTANCE = new ClusterHealthAction(); public static final String NAME = "cluster:monitor/health"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java index a3b4161732700..4ea7ee5bc3bbe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class NodesHotThreadsAction extends Action { +public class NodesHotThreadsAction extends Action { public static final 
NodesHotThreadsAction INSTANCE = new NodesHotThreadsAction(); public static final String NAME = "cluster:monitor/nodes/hot_threads"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java index e46bc54d80004..edc5ed7e83f0f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class NodesInfoAction extends Action { +public class NodesInfoAction extends Action { public static final NodesInfoAction INSTANCE = new NodesInfoAction(); public static final String NAME = "cluster:monitor/nodes/info"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java index ccaeca8702f0b..19e8fc1929c5d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.Action; public class NodesReloadSecureSettingsAction - extends Action { + extends Action { public static final NodesReloadSecureSettingsAction INSTANCE = new NodesReloadSecureSettingsAction(); public static final String NAME = "cluster:admin/nodes/reload_secure_settings"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java index d8018716135cb..bc8c81ef1e0f5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class NodesStatsAction extends Action { +public class NodesStatsAction extends Action { public static final NodesStatsAction INSTANCE = new NodesStatsAction(); public static final String NAME = "cluster:monitor/nodes/stats"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java index 0c21c9a919849..0ea6162e59c7f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java @@ -24,7 +24,7 @@ /** * Action for cancelling running tasks */ -public class CancelTasksAction extends Action { +public class CancelTasksAction extends Action { public static final CancelTasksAction INSTANCE = new CancelTasksAction(); public static final String NAME = "cluster:admin/tasks/cancel"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java index 9dfe1e83e5f9a..d1e27e49088c7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java @@ -24,7 +24,7 @@ /** * Action for retrieving a list of currently running tasks */ -public class GetTaskAction extends Action { +public class GetTaskAction extends Action { public static final GetTaskAction INSTANCE = new GetTaskAction(); public static final String NAME = "cluster:monitor/task/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java index e054f074aa21e..b02d019859fcf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java @@ -24,7 +24,7 @@ /** * Action for retrieving a list of currently running tasks */ -public class ListTasksAction extends Action { +public class ListTasksAction extends Action { public static final ListTasksAction INSTANCE = new ListTasksAction(); public static final String NAME = "cluster:monitor/tasks/lists"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java index 7722339ebe0ee..6bc6dce54945a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class NodesUsageAction extends Action { +public class NodesUsageAction extends Action { public static final NodesUsageAction INSTANCE = new NodesUsageAction(); public static final String NAME = "cluster:monitor/nodes/usage"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java index 05ab953a0d919..d8af249be0251 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java @@ -26,7 +26,7 @@ public class NodesUsageRequestBuilder extends NodesOperationRequestBuilder { - public NodesUsageRequestBuilder(ElasticsearchClient client, Action action) { + public NodesUsageRequestBuilder(ElasticsearchClient client, Action action) { super(client, action, new NodesUsageRequest()); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java index 6e3c877156d80..3b998049daaaa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public final class RemoteInfoAction extends Action { +public final class RemoteInfoAction extends Action { public static final String NAME = "cluster:monitor/remote/info"; public static final RemoteInfoAction INSTANCE = new RemoteInfoAction(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java index 3d7f9187ea256..c5c72bb97f1e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java @@ -24,7 +24,7 @@ /** * Unregister repository action */ -public class DeleteRepositoryAction extends Action { +public class DeleteRepositoryAction extends Action { public static final DeleteRepositoryAction INSTANCE = new DeleteRepositoryAction(); public static final String NAME = "cluster:admin/repository/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java index 0ec28a4781deb..d89e466461d9b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java @@ -24,7 +24,7 @@ /** * Get repositories action */ -public class GetRepositoriesAction extends Action { +public class GetRepositoriesAction extends Action { public static final GetRepositoriesAction INSTANCE = new GetRepositoriesAction(); public static final String NAME = "cluster:admin/repository/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java index 3c3d24fb2d3f5..801e0a513b077 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java @@ -24,7 +24,7 @@ /** * Register repository action */ -public class PutRepositoryAction extends Action { +public class PutRepositoryAction extends Action { public static final PutRepositoryAction INSTANCE = new PutRepositoryAction(); public static final String NAME = "cluster:admin/repository/put"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java index 4d5865fdf9cee..743b0a57f3aa3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java @@ -24,7 +24,7 @@ /** * Unregister repository action */ -public class VerifyRepositoryAction extends Action { +public class VerifyRepositoryAction extends Action { public static final VerifyRepositoryAction INSTANCE = new VerifyRepositoryAction(); public static final String NAME = "cluster:admin/repository/verify"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java index 4366465ac69e9..06b083e3414b2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java @@ -21,7 +21,7 @@ import 
org.elasticsearch.action.Action; -public class ClusterRerouteAction extends Action { +public class ClusterRerouteAction extends Action { public static final ClusterRerouteAction INSTANCE = new ClusterRerouteAction(); public static final String NAME = "cluster:admin/reroute"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java index 347deae04c049..af6a87f5a57e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClusterUpdateSettingsAction extends Action { +public class ClusterUpdateSettingsAction extends Action { public static final ClusterUpdateSettingsAction INSTANCE = new ClusterUpdateSettingsAction(); public static final String NAME = "cluster:admin/settings/update"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java index 3184298ef8b2e..ec936c623a24a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClusterSearchShardsAction extends Action { +public class ClusterSearchShardsAction extends Action { public static final ClusterSearchShardsAction INSTANCE = new ClusterSearchShardsAction(); public static final String NAME = "indices:admin/shards/search_shards"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java index c51ff1ff009cd..d37132a1d81e7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java @@ -24,7 +24,7 @@ /** * Create snapshot action */ -public class CreateSnapshotAction extends Action { +public class CreateSnapshotAction extends Action { public static final CreateSnapshotAction INSTANCE = new CreateSnapshotAction(); public static final String NAME = "cluster:admin/snapshot/create"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java index 0a806e2d82d5e..ac04ea6690562 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java @@ -24,7 +24,7 @@ /** * Delete snapshot action */ -public class DeleteSnapshotAction extends Action { +public class DeleteSnapshotAction extends Action { public static final DeleteSnapshotAction INSTANCE = new DeleteSnapshotAction(); public static final String NAME = "cluster:admin/snapshot/delete"; diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java index 7eb23a836aad2..b5015ff5c23b0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java @@ -24,7 +24,7 @@ /** * Get snapshots action */ -public class GetSnapshotsAction extends Action { +public class GetSnapshotsAction extends Action { public static final GetSnapshotsAction INSTANCE = new GetSnapshotsAction(); public static final String NAME = "cluster:admin/snapshot/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java index 4043784d470a7..e633ce43e66cd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java @@ -24,7 +24,7 @@ /** * Restore snapshot action */ -public class RestoreSnapshotAction extends Action { +public class RestoreSnapshotAction extends Action { public static final RestoreSnapshotAction INSTANCE = new RestoreSnapshotAction(); public static final String NAME = "cluster:admin/snapshot/restore"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java index 20d1e80451d5b..ea28d26b40ffa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java @@ -24,7 +24,7 @@ /** * Snapshots status action */ -public class SnapshotsStatusAction extends Action { +public class SnapshotsStatusAction extends Action { public static final SnapshotsStatusAction INSTANCE = new SnapshotsStatusAction(); public static final String NAME = "cluster:admin/snapshot/status"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java index 663622b62a319..f48df06d53c6f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClusterStateAction extends Action { +public class ClusterStateAction extends Action { public static final ClusterStateAction INSTANCE = new ClusterStateAction(); public static final String NAME = "cluster:monitor/state"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java index a62c0a9743c57..049ce62d9df40 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class 
ClusterStatsAction extends Action { +public class ClusterStatsAction extends Action { public static final ClusterStatsAction INSTANCE = new ClusterStatsAction(); public static final String NAME = "cluster:monitor/stats"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java index ab99a6aa8a75b..070d8d055ea89 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class DeleteStoredScriptAction extends Action { +public class DeleteStoredScriptAction extends Action { public static final DeleteStoredScriptAction INSTANCE = new DeleteStoredScriptAction(); public static final String NAME = "cluster:admin/script/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java index b5d5e864d0701..e8015a4487496 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetStoredScriptAction extends Action { +public class GetStoredScriptAction extends Action { public static final GetStoredScriptAction INSTANCE = new GetStoredScriptAction(); public static final String NAME = "cluster:admin/script/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java index 1924276e1c4da..41c345fd00733 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.Action; -public class PutStoredScriptAction extends Action { +public class PutStoredScriptAction extends Action { public static final PutStoredScriptAction INSTANCE = new PutStoredScriptAction(); public static final String NAME = "cluster:admin/script/put"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java index 98b17e9968405..296c65146a03c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class PendingClusterTasksAction extends Action { +public class PendingClusterTasksAction extends Action { public static final PendingClusterTasksAction INSTANCE = new PendingClusterTasksAction(); public static final String NAME = "cluster:monitor/task"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java index f584a7520fe87..378e017855a13 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class IndicesAliasesAction extends Action { +public class IndicesAliasesAction extends Action { public static final IndicesAliasesAction INSTANCE = new IndicesAliasesAction(); public static final String NAME = "indices:admin/aliases"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/AliasesExistAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/AliasesExistAction.java index de4d044e4cb0b..dfaebab076c66 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/AliasesExistAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/AliasesExistAction.java @@ -20,9 +20,8 @@ package org.elasticsearch.action.admin.indices.alias.exists; import org.elasticsearch.action.Action; -import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -public class AliasesExistAction extends Action { +public class AliasesExistAction extends Action { public static final AliasesExistAction INSTANCE = new AliasesExistAction(); public static final String NAME = "indices:admin/aliases/exists"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/BaseAliasesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/BaseAliasesRequestBuilder.java index f50ae07d1f596..ebecc0dbf18ea 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/BaseAliasesRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/BaseAliasesRequestBuilder.java @@ -28,7 +28,7 @@ public abstract class BaseAliasesRequestBuilder> extends MasterNodeReadOperationRequestBuilder { - public BaseAliasesRequestBuilder(ElasticsearchClient client, Action action, String... aliases) { + public BaseAliasesRequestBuilder(ElasticsearchClient client, Action action, String... 
aliases) { super(client, action, new GetAliasesRequest(aliases)); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java index d8f2453c2cbd0..db423c2aaaa07 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetAliasesAction extends Action { +public class GetAliasesAction extends Action { public static final GetAliasesAction INSTANCE = new GetAliasesAction(); public static final String NAME = "indices:admin/aliases/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index 1288eccede285..e2bbd655992de 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class AnalyzeAction extends Action { +public class AnalyzeAction extends Action { public static final AnalyzeAction INSTANCE = new AnalyzeAction(); public static final String NAME = "indices:admin/analyze"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java index f0b0b19ad5ffa..e5bdd53bdc7a7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClearIndicesCacheAction extends Action { +public class ClearIndicesCacheAction extends Action { public static final ClearIndicesCacheAction INSTANCE = new ClearIndicesCacheAction(); public static final String NAME = "indices:admin/cache/clear"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java index f206324f5ea54..5c3d60dd44013 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class CloseIndexAction extends Action { +public class CloseIndexAction extends Action { public static final CloseIndexAction INSTANCE = new CloseIndexAction(); public static final String NAME = "indices:admin/close"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java index cceadaabefddc..3993ea7256f61 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class CreateIndexAction extends Action { +public class CreateIndexAction extends 
Action { public static final CreateIndexAction INSTANCE = new CreateIndexAction(); public static final String NAME = "indices:admin/create"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java index 40cabf5e78175..680501af3b761 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class DeleteIndexAction extends Action { +public class DeleteIndexAction extends Action { public static final DeleteIndexAction INSTANCE = new DeleteIndexAction(); public static final String NAME = "indices:admin/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsAction.java index af655b9d1b081..b878994549f5a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class IndicesExistsAction extends Action { +public class IndicesExistsAction extends Action { public static final IndicesExistsAction INSTANCE = new IndicesExistsAction(); public static final String NAME = "indices:admin/exists"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsAction.java index 3f3e1d98b5058..0b508110d7f50 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsAction.java @@ -20,7 +20,7 @@ import org.elasticsearch.action.Action; -public class TypesExistsAction extends Action { +public class TypesExistsAction extends Action { public static final TypesExistsAction INSTANCE = new TypesExistsAction(); public static final String NAME = "indices:admin/types/exists"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java index 41e7d214760b3..60d5b43a6c189 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class FlushAction extends Action { +public class FlushAction extends Action { public static final FlushAction INSTANCE = new FlushAction(); public static final String NAME = "indices:admin/flush"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java index 7501f2dc2c238..5005cd2ec0878 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.Action; -public class SyncedFlushAction 
extends Action { +public class SyncedFlushAction extends Action { public static final SyncedFlushAction INSTANCE = new SyncedFlushAction(); public static final String NAME = "indices:admin/synced_flush"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java index 524d41333012b..51095435343cb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ForceMergeAction extends Action { +public class ForceMergeAction extends Action { public static final ForceMergeAction INSTANCE = new ForceMergeAction(); public static final String NAME = "indices:admin/forcemerge"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java index 843d331db95a0..86396f246a414 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetIndexAction extends Action { +public class GetIndexAction extends Action { public static final GetIndexAction INSTANCE = new GetIndexAction(); public static final String NAME = "indices:admin/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java index 58fd60f997429..5aa19652b676d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetFieldMappingsAction extends Action { +public class GetFieldMappingsAction extends Action { public static final GetFieldMappingsAction INSTANCE = new GetFieldMappingsAction(); public static final String NAME = "indices:admin/mappings/fields/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java index 08042baa803ef..8bae685fff5dd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetMappingsAction extends Action { +public class GetMappingsAction extends Action { public static final GetMappingsAction INSTANCE = new GetMappingsAction(); public static final String NAME = "indices:admin/mappings/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java index cdbf0bb8b53c2..63ab198f816bf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class PutMappingAction extends Action { +public class PutMappingAction extends Action { public static final PutMappingAction INSTANCE = new PutMappingAction(); public static final String NAME = "indices:admin/mapping/put"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java index 669dbfc78a5b0..ea6f1eb6afd95 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class OpenIndexAction extends Action { +public class OpenIndexAction extends Action { public static final OpenIndexAction INSTANCE = new OpenIndexAction(); public static final String NAME = "indices:admin/open"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java index 453d52c02a087..bfe261b58843a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java @@ -24,7 +24,7 @@ /** * Recovery information action */ -public class RecoveryAction extends Action { +public class RecoveryAction extends Action { public static final RecoveryAction INSTANCE = new RecoveryAction(); public static final String NAME = "indices:monitor/recovery"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java index 059e26d29fec5..b0dac076b2f49 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class RefreshAction extends Action { +public class RefreshAction extends Action { public static final RefreshAction INSTANCE = new RefreshAction(); public static final String NAME = "indices:admin/refresh"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java index 18edb82af08d7..0bcd4eefe0762 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class RolloverAction extends Action { +public class RolloverAction extends Action { public static final RolloverAction INSTANCE = new RolloverAction(); public static final String NAME = "indices:admin/rollover"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java index 420a2dea04cc5..669c31d6b087d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class IndicesSegmentsAction extends Action { +public class IndicesSegmentsAction extends Action { public static final IndicesSegmentsAction INSTANCE = new IndicesSegmentsAction(); public static final String NAME = "indices:monitor/segments"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java index b06af8726878a..e4149aaf8f15f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetSettingsAction extends Action { +public class GetSettingsAction extends Action { public static final GetSettingsAction INSTANCE = new GetSettingsAction(); public static final String NAME = "indices:monitor/settings/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java index 9a8c667e18202..7f82c8f29e750 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class UpdateSettingsAction extends Action { +public class UpdateSettingsAction extends Action { public static final UpdateSettingsAction INSTANCE = new UpdateSettingsAction(); public static final String NAME = "indices:admin/settings/update"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java index bb21ea9f85eac..cf38feae56f13 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java @@ -30,7 +30,7 @@ */ public class IndicesShardStoreRequestBuilder extends MasterNodeReadOperationRequestBuilder { - public IndicesShardStoreRequestBuilder(ElasticsearchClient client, Action action, String... indices) { + public IndicesShardStoreRequestBuilder(ElasticsearchClient client, Action action, String... 
indices) { super(client, action, new IndicesShardStoresRequest(indices)); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java index 8bb08ee0f09a8..d3ce0077d5e94 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java @@ -28,7 +28,7 @@ * Shard store information reports which nodes hold shard copies, how recent they are * and any exceptions on opening the shard index or from previous engine failures */ -public class IndicesShardStoresAction extends Action { +public class IndicesShardStoresAction extends Action { public static final IndicesShardStoresAction INSTANCE = new IndicesShardStoresAction(); public static final String NAME = "indices:monitor/shard_stores"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java index 5791999b7fc85..72dcb17f212be 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.Action; -public class ResizeAction extends Action { +public class ResizeAction extends Action { public static final ResizeAction INSTANCE = new ResizeAction(); public static final String NAME = "indices:admin/resize"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java index 36f645c16170c..e4b9a34b00415 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java @@ -27,7 +27,7 @@ public class ResizeRequestBuilder extends AcknowledgedRequestBuilder { - public ResizeRequestBuilder(ElasticsearchClient client, Action action) { + public ResizeRequestBuilder(ElasticsearchClient client, Action action) { super(client, action, new ResizeRequest()); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java index cddc8390b546e..a2c689ba360ee 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ShrinkAction extends Action { +public class ShrinkAction extends Action { public static final ShrinkAction INSTANCE = new ShrinkAction(); public static final String NAME = "indices:admin/shrink"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java index 60363722193cb..6765279eb6b87 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java @@ -21,7 +21,7 @@ import 
org.elasticsearch.action.Action; -public class IndicesStatsAction extends Action { +public class IndicesStatsAction extends Action { public static final IndicesStatsAction INSTANCE = new IndicesStatsAction(); public static final String NAME = "indices:monitor/stats"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java index 5cef3045846a2..47f37b9e6cfee 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class DeleteIndexTemplateAction extends Action { +public class DeleteIndexTemplateAction extends Action { public static final DeleteIndexTemplateAction INSTANCE = new DeleteIndexTemplateAction(); public static final String NAME = "indices:admin/template/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java index 1735e7fee3873..ac00b80079ca1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java @@ -20,7 +20,7 @@ import org.elasticsearch.action.Action; -public class GetIndexTemplatesAction extends Action { +public class GetIndexTemplatesAction extends Action { public static final GetIndexTemplatesAction INSTANCE = new GetIndexTemplatesAction(); public static final String NAME = "indices:admin/template/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java index b73384452e6ee..399170eaeced9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class PutIndexTemplateAction extends Action { +public class PutIndexTemplateAction extends Action { public static final PutIndexTemplateAction INSTANCE = new PutIndexTemplateAction(); public static final String NAME = "indices:admin/template/put"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java index 9e078a53bb714..57506b615d692 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class UpgradeStatusAction extends Action { +public class UpgradeStatusAction extends Action { public static final UpgradeStatusAction INSTANCE = new UpgradeStatusAction(); public static final String NAME = "indices:monitor/upgrade"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java index 20f780cd0bce7..7ec83930e44bf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java @@ -24,7 +24,7 @@ /** * Upgrade index/indices action. */ -public class UpgradeAction extends Action { +public class UpgradeAction extends Action { public static final UpgradeAction INSTANCE = new UpgradeAction(); public static final String NAME = "indices:admin/upgrade"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java index aabb9b3c660d4..5f1ee88e34e8b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class UpgradeSettingsAction extends Action { +public class UpgradeSettingsAction extends Action { public static final UpgradeSettingsAction INSTANCE = new UpgradeSettingsAction(); public static final String NAME = "internal:indices/admin/upgrade"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java index 47c982d5f6d85..93151dd8a2ba9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ValidateQueryAction extends Action { +public class ValidateQueryAction extends Action { public static final ValidateQueryAction INSTANCE = new ValidateQueryAction(); public static final String NAME = "indices:admin/validate/query"; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java index ea7d0160fd996..84b854a98ee86 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.transport.TransportRequestOptions; -public class BulkAction extends Action { +public class BulkAction extends Action { public static final BulkAction INSTANCE = new BulkAction(); public static final String NAME = "indices:data/write/bulk"; diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java index 84135b617b59b..d78b6f60bffc3 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java +++ b/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class DeleteAction extends Action { +public class DeleteAction extends Action { public static final DeleteAction INSTANCE = new DeleteAction(); public static final String NAME = "indices:data/write/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java 
b/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java index b298e62f514a1..13c9d94e7dbc7 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java @@ -24,7 +24,7 @@ /** * Entry point for the explain feature. */ -public class ExplainAction extends Action { +public class ExplainAction extends Action { public static final ExplainAction INSTANCE = new ExplainAction(); public static final String NAME = "indices:data/read/explain"; diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java index 24896d3c620d1..39c6ecce308e0 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class FieldCapabilitiesAction extends Action { +public class FieldCapabilitiesAction extends Action { public static final FieldCapabilitiesAction INSTANCE = new FieldCapabilitiesAction(); public static final String NAME = "indices:data/read/field_caps"; diff --git a/server/src/main/java/org/elasticsearch/action/get/GetAction.java b/server/src/main/java/org/elasticsearch/action/get/GetAction.java index c9df5ffb98e00..a622fd5a8178b 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetAction extends Action { +public class GetAction extends Action { public static final GetAction INSTANCE = new GetAction(); public static final String NAME = "indices:data/read/get"; diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java index 1080b71fb9f65..9b69e33239b82 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class MultiGetAction extends Action { +public class MultiGetAction extends Action { public static final MultiGetAction INSTANCE = new MultiGetAction(); public static final String NAME = "indices:data/read/mget"; diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexAction.java b/server/src/main/java/org/elasticsearch/action/index/IndexAction.java index 9642f28b2a417..4f3e6068a2a23 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class IndexAction extends Action { +public class IndexAction extends Action { public static final IndexAction INSTANCE = new IndexAction(); public static final String NAME = "indices:data/write/index"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java index 03271f439c709..e0df57a6dadf6 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java @@ -21,7 +21,7 @@ import 
org.elasticsearch.action.Action; -public class DeletePipelineAction extends Action { +public class DeletePipelineAction extends Action { public static final DeletePipelineAction INSTANCE = new DeletePipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java index 82e1a546b6e52..b2305227ac67f 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetPipelineAction extends Action { +public class GetPipelineAction extends Action { public static final GetPipelineAction INSTANCE = new GetPipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/get"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java b/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java index 991e233220726..6f5147c38bdbb 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java @@ -47,7 +47,7 @@ public IngestActionForwarder(TransportService transportService) { ingestNodes = new DiscoveryNode[0]; } - public void forwardIngestRequest(Action action, ActionRequest request, ActionListener listener) { + public void forwardIngestRequest(Action action, ActionRequest request, ActionListener listener) { transportService.sendRequest(randomIngestNode(), action.name(), request, new ActionListenerResponseHandler(listener, action::newResponse)); } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java index 068cbea08399b..c4784598ae75f 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class PutPipelineAction extends Action { +public class PutPipelineAction extends Action { public static final PutPipelineAction INSTANCE = new PutPipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/put"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java index 4da35ba25b7eb..afeb4e01fb0ff 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class SimulatePipelineAction extends Action { +public class SimulatePipelineAction extends Action { public static final SimulatePipelineAction INSTANCE = new SimulatePipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/simulate"; diff --git a/server/src/main/java/org/elasticsearch/action/main/MainAction.java b/server/src/main/java/org/elasticsearch/action/main/MainAction.java index 10fd1f9887c7d..831ddd0983fad 100644 --- a/server/src/main/java/org/elasticsearch/action/main/MainAction.java +++ b/server/src/main/java/org/elasticsearch/action/main/MainAction.java @@ 
-21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class MainAction extends Action { +public class MainAction extends Action { public static final String NAME = "cluster:monitor/main"; public static final MainAction INSTANCE = new MainAction(); diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java index 4edda430c5cda..660ed1ee17860 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClearScrollAction extends Action { +public class ClearScrollAction extends Action { public static final ClearScrollAction INSTANCE = new ClearScrollAction(); public static final String NAME = "indices:data/read/scroll/clear"; diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java index c2c8c4ce23256..298c7593f6c97 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class MultiSearchAction extends Action { +public class MultiSearchAction extends Action { public static final MultiSearchAction INSTANCE = new MultiSearchAction(); public static final String NAME = "indices:data/read/msearch"; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchAction.java index 80bc1abcca8c6..e028f6c5cd524 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class SearchAction extends Action { +public class SearchAction extends Action { public static final SearchAction INSTANCE = new SearchAction(); public static final String NAME = "indices:data/read/search"; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java index 209e3c7f583e2..ff72a7e5e51f3 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class SearchScrollAction extends Action { +public class SearchScrollAction extends Action { public static final SearchScrollAction INSTANCE = new SearchScrollAction(); public static final String NAME = "indices:data/read/scroll"; diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java index 056381cc7dc11..f2ba67e3a9f4c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java @@ -27,7 +27,7 @@ public abstract class BroadcastOperationRequestBuilder, Response extends BroadcastResponse, RequestBuilder extends BroadcastOperationRequestBuilder> extends 
ActionRequestBuilder { - protected BroadcastOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected BroadcastOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java index 45d92801170f5..1269024aeca8c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java @@ -28,7 +28,7 @@ public abstract class AcknowledgedRequestBuilder, Response extends AcknowledgedResponse, RequestBuilder extends AcknowledgedRequestBuilder> extends MasterNodeOperationRequestBuilder { - protected AcknowledgedRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected AcknowledgedRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java index 1302d2003da19..33a7b344743f4 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java @@ -31,7 +31,7 @@ public abstract class MasterNodeOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeOperationRequestBuilder> extends ActionRequestBuilder { - protected MasterNodeOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected MasterNodeOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java index 96735f101e6a7..85a66d33db91e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java @@ -29,7 +29,7 @@ public abstract class MasterNodeReadOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeReadOperationRequestBuilder> extends MasterNodeOperationRequestBuilder { - protected MasterNodeReadOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected MasterNodeReadOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequestBuilder.java index 26dedf15da80d..d8989ba10bc01 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequestBuilder.java @@ -28,7 +28,7 @@ public abstract class ClusterInfoRequestBuilder, 
Response extends ActionResponse, Builder extends ClusterInfoRequestBuilder> extends MasterNodeReadOperationRequestBuilder { - protected ClusterInfoRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected ClusterInfoRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java index 37c06a419589a..f81e6c9cf41c7 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java @@ -27,7 +27,7 @@ public abstract class NodesOperationRequestBuilder, Response extends BaseNodesResponse, RequestBuilder extends NodesOperationRequestBuilder> extends ActionRequestBuilder { - protected NodesOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected NodesOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java index 32fbaf70ab35c..9dc7a899d033c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java @@ -29,7 +29,7 @@ public abstract class ReplicationRequestBuilder, Response extends ActionResponse, RequestBuilder extends ReplicationRequestBuilder> extends ActionRequestBuilder { - protected ReplicationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected ReplicationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java index 0c5d26b0aed64..0880ed9fa6205 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java @@ -28,7 +28,7 @@ public abstract class InstanceShardOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends InstanceShardOperationRequestBuilder> extends ActionRequestBuilder { - protected InstanceShardOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected InstanceShardOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequestBuilder.java index 1de3479fb5d52..b5a2d67bc9d51 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequestBuilder.java @@ -27,7 +27,7 @@ public abstract class SingleShardOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends SingleShardOperationRequestBuilder> extends ActionRequestBuilder { - protected SingleShardOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected SingleShardOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java index cda4497437a7b..52e3c2cf44e33 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java @@ -33,7 +33,7 @@ public class TasksRequestBuilder< RequestBuilder extends TasksRequestBuilder > extends ActionRequestBuilder { - protected TasksRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected TasksRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java index f7a9eda6cc265..a894b3480f10f 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class MultiTermVectorsAction extends Action { +public class MultiTermVectorsAction extends Action { public static final MultiTermVectorsAction INSTANCE = new MultiTermVectorsAction(); public static final String NAME = "indices:data/read/mtv"; diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java index ded987c52a085..e701efe93ba7a 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class TermVectorsAction extends Action { +public class TermVectorsAction extends Action { public static final TermVectorsAction INSTANCE = new TermVectorsAction(); public static final String NAME = "indices:data/read/tv"; diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java index 734169283dd39..1c8c80b61cef1 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class UpdateAction extends Action { +public class UpdateAction extends Action { public static final UpdateAction INSTANCE = new UpdateAction(); public static final String NAME = "indices:data/write/update"; diff --git a/server/src/main/java/org/elasticsearch/client/ElasticsearchClient.java b/server/src/main/java/org/elasticsearch/client/ElasticsearchClient.java index a0021e6e4c1a8..284c2b8d51420 100644 --- 
a/server/src/main/java/org/elasticsearch/client/ElasticsearchClient.java +++ b/server/src/main/java/org/elasticsearch/client/ElasticsearchClient.java @@ -39,7 +39,7 @@ public interface ElasticsearchClient { * @return A future allowing to get back the response. */ ActionFuture execute( - Action action, Request request); + Action action, Request request); /** * Executes a generic action, denoted by an {@link Action}. @@ -51,7 +51,7 @@ ActionFuture The response type. */ void execute( - Action action, Request request, ActionListener listener); + Action action, Request request, ActionListener listener); /** * Returns the threadpool used to execute requests on this client diff --git a/server/src/main/java/org/elasticsearch/client/FilterClient.java b/server/src/main/java/org/elasticsearch/client/FilterClient.java index ac94cdf017680..bfccabac58043 100644 --- a/server/src/main/java/org/elasticsearch/client/FilterClient.java +++ b/server/src/main/java/org/elasticsearch/client/FilterClient.java @@ -63,7 +63,7 @@ public void close() { @Override protected > void doExecute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { in().execute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java b/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java index 180c0f21b8d9a..a0934ba633dd5 100644 --- a/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java +++ b/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java @@ -61,7 +61,7 @@ public Client unwrap() { protected < Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder - > void doExecute(Action action, Request request, ActionListener listener) { + > void doExecute(Action action, Request request, ActionListener listener) { request.setParentTask(parentTask); super.doExecute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java index 3f214c90b6701..9e50fa56fab60 100644 --- a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java +++ b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.client.support.AbstractClient; @@ -43,10 +42,10 @@ */ public class NodeClient extends AbstractClient { - private Map actions; + private Map actions; /** * The id of the local {@link DiscoveryNode}. Useful for generating task ids from tasks returned by - * {@link #executeLocally(GenericAction, ActionRequest, TaskListener)}. + * {@link #executeLocally(Action, ActionRequest, TaskListener)}. 
*/ private Supplier localNodeId; private RemoteClusterService remoteClusterService; @@ -55,7 +54,7 @@ public NodeClient(Settings settings, ThreadPool threadPool) { super(settings, threadPool); } - public void initialize(Map actions, Supplier localNodeId, + public void initialize(Map actions, Supplier localNodeId, RemoteClusterService remoteClusterService) { this.actions = actions; this.localNodeId = localNodeId; @@ -71,7 +70,7 @@ public void close() { public < Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder - > void doExecute(Action action, Request request, ActionListener listener) { + > void doExecute(Action action, Request request, ActionListener listener) { // Discard the task because the Client interface doesn't use it. executeLocally(action, request, listener); } @@ -83,7 +82,7 @@ > void doExecute(Action action, Request request, ActionListen */ public < Request extends ActionRequest, Response extends ActionResponse - > Task executeLocally(GenericAction action, Request request, ActionListener listener) { + > Task executeLocally(Action action, Request request, ActionListener listener) { return transportAction(action).execute(request, listener); } @@ -93,13 +92,13 @@ > Task executeLocally(GenericAction action, Request request, */ public < Request extends ActionRequest, Response extends ActionResponse - > Task executeLocally(GenericAction action, Request request, TaskListener listener) { + > Task executeLocally(Action action, Request request, TaskListener listener) { return transportAction(action).execute(request, listener); } /** * The id of the local {@link DiscoveryNode}. Useful for generating task ids from tasks returned by - * {@link #executeLocally(GenericAction, ActionRequest, TaskListener)}. + * {@link #executeLocally(Action, ActionRequest, TaskListener)}. 
*/ public String getLocalNodeId() { return localNodeId.get(); @@ -111,7 +110,7 @@ public String getLocalNodeId() { @SuppressWarnings("unchecked") private < Request extends ActionRequest, Response extends ActionResponse - > TransportAction transportAction(GenericAction action) { + > TransportAction transportAction(Action action) { if (actions == null) { throw new IllegalStateException("NodeClient has not been initialized"); } diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index dc70da4e61f7e..12db219f8ec78 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -385,7 +385,7 @@ public final AdminClient admin() { @Override public final ActionFuture execute( - Action action, Request request) { + Action action, Request request) { PlainActionFuture actionFuture = PlainActionFuture.newFuture(); execute(action, request, actionFuture); return actionFuture; @@ -396,12 +396,12 @@ public final Ac */ @Override public final void execute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { listener = threadedWrapper.wrap(listener); doExecute(action, request, listener); } - protected abstract > void doExecute(Action action, Request request, ActionListener listener); + protected abstract > void doExecute(Action action, Request request, ActionListener listener); @Override public ActionFuture index(final IndexRequest request) { @@ -698,13 +698,13 @@ static class ClusterAdmin implements ClusterAdminClient { @Override public ActionFuture execute( - Action action, Request request) { + Action action, Request request) { return client.execute(action, request); } @Override public void execute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { client.execute(action, request, listener); } @@ -1238,13 +1238,13 @@ static class IndicesAdmin implements IndicesAdminClient { @Override public ActionFuture execute( - Action action, Request request) { + Action action, Request request) { return client.execute(action, request); } @Override public void execute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { client.execute(action, request, listener); } @@ -1764,7 +1764,7 @@ public void getSettings(GetSettingsRequest request, ActionListener headers) { return new FilterClient(this) { @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected > void doExecute(Action action, Request request, ActionListener listener) { ThreadContext threadContext = threadPool().getThreadContext(); try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(headers)) { super.doExecute(action, request, listener); diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java index ecdecc4457bdd..53f6dea21c7d1 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; 
-import org.elasticsearch.action.GenericAction; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -209,10 +208,10 @@ private static ClientTemplate buildTemplate(Settings providedSettings, Settings // construct the list of client actions final List actionPlugins = pluginsService.filterPlugins(ActionPlugin.class); - final List clientActions = + final List clientActions = actionPlugins.stream().flatMap(p -> p.getClientActions().stream()).collect(Collectors.toList()); // add all the base actions - final List> baseActions = + final List> baseActions = actionModule.getActions().values().stream().map(ActionPlugin.ActionHandler::getAction).collect(Collectors.toList()); clientActions.addAll(baseActions); final TransportProxyClient proxy = new TransportProxyClient(settings, transportService, nodesService, clientActions); @@ -378,7 +377,7 @@ public void close() { } @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected > void doExecute(Action action, Request request, ActionListener listener) { proxy.execute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportProxyClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportProxyClient.java index 249fd54ef69d9..d79e2a9119e6a 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportProxyClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportProxyClient.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.TransportActionNodeProxy; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.transport.TransportService; @@ -41,19 +40,17 @@ final class TransportProxyClient { private final Map proxies; TransportProxyClient(Settings settings, TransportService transportService, - TransportClientNodesService nodesService, List actions) { + TransportClientNodesService nodesService, List actions) { this.nodesService = nodesService; Map proxies = new HashMap<>(); - for (GenericAction action : actions) { - if (action instanceof Action) { - proxies.put((Action) action, new TransportActionNodeProxy(settings, action, transportService)); - } + for (Action action : actions) { + proxies.put(action, new TransportActionNodeProxy(settings, action, transportService)); } this.proxies = unmodifiableMap(proxies); } public > void execute(final Action action, + ActionRequestBuilder> void execute(final Action action, final Request request, ActionListener listener) { final TransportActionNodeProxy proxy = proxies.get(action); assert proxy != null : "no proxy found for action: " + action; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java index 5ffccef23f51c..227814e24302e 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java @@ -35,7 +35,7 @@ public abstract class AbstractBulkByScrollRequestBuilder< private final SearchRequestBuilder source; protected 
AbstractBulkByScrollRequestBuilder(ElasticsearchClient client, - Action action, SearchRequestBuilder source, Request request) { + Action action, SearchRequestBuilder source, Request request) { super(client, action, request); this.source = source; } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java index d8cf2f49149f7..519c6e062d565 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java @@ -30,7 +30,7 @@ public abstract class AbstractBulkIndexByScrollRequestBuilder< extends AbstractBulkByScrollRequestBuilder { protected AbstractBulkIndexByScrollRequestBuilder(ElasticsearchClient client, - Action action, SearchRequestBuilder search, Request request) { + Action action, SearchRequestBuilder search, Request request) { super(client, action, search, request); } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java index 639c615727fe0..c7cfe28e2c0be 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class DeleteByQueryAction extends Action { +public class DeleteByQueryAction extends Action { public static final DeleteByQueryAction INSTANCE = new DeleteByQueryAction(); public static final String NAME = "indices:data/write/delete/byquery"; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java index ef2057806e1ce..202aa6e93f19d 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java @@ -27,12 +27,12 @@ public class DeleteByQueryRequestBuilder extends AbstractBulkByScrollRequestBuilder { - public DeleteByQueryRequestBuilder(ElasticsearchClient client, Action action) { + public DeleteByQueryRequestBuilder(ElasticsearchClient client, Action action) { this(client, action, new SearchRequestBuilder(client, SearchAction.INSTANCE)); } private DeleteByQueryRequestBuilder(ElasticsearchClient client, - Action action, + Action action, SearchRequestBuilder search) { super(client, action, search, new DeleteByQueryRequest(search.request())); } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java index c4e4a68d114d0..86d0c96602a3c 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ReindexAction extends Action { +public class ReindexAction extends Action { public static final ReindexAction INSTANCE = new ReindexAction(); public static final String NAME = "indices:data/write/reindex"; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java 
b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java index 3d20bca989b83..c775ae197db93 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java @@ -31,13 +31,13 @@ public class ReindexRequestBuilder extends private final IndexRequestBuilder destination; public ReindexRequestBuilder(ElasticsearchClient client, - Action action) { + Action action) { this(client, action, new SearchRequestBuilder(client, SearchAction.INSTANCE), new IndexRequestBuilder(client, IndexAction.INSTANCE)); } private ReindexRequestBuilder(ElasticsearchClient client, - Action action, + Action action, SearchRequestBuilder search, IndexRequestBuilder destination) { super(client, action, search, new ReindexRequest(search.request(), destination.request())); this.destination = destination; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java index b2fb0a205eb08..250a267ea255d 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class UpdateByQueryAction extends Action { +public class UpdateByQueryAction extends Action { public static final UpdateByQueryAction INSTANCE = new UpdateByQueryAction(); public static final String NAME = "indices:data/write/update/byquery"; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java index dc56bb9cf9de3..6b9600dea5eae 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java @@ -27,12 +27,12 @@ public class UpdateByQueryRequestBuilder extends AbstractBulkIndexByScrollRequestBuilder { - public UpdateByQueryRequestBuilder(ElasticsearchClient client, Action action) { + public UpdateByQueryRequestBuilder(ElasticsearchClient client, Action action) { this(client, action, new SearchRequestBuilder(client, SearchAction.INSTANCE)); } private UpdateByQueryRequestBuilder(ElasticsearchClient client, - Action action, + Action action, SearchRequestBuilder search) { super(client, action, search, new UpdateByQueryRequest(search.request())); } diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index aa9b3943e8863..4440153dd361e 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -27,7 +27,7 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionModule; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.search.SearchExecutionStatsCollector; import org.elasticsearch.action.search.SearchPhaseController; import org.elasticsearch.action.search.SearchTransportService; @@ -540,7 +540,7 @@ protected Node(final Environment environment, Collection .map(injector::getInstance).collect(Collectors.toList())); resourcesToClose.addAll(pluginLifecycleComponents); this.pluginLifecycleComponents = 
Collections.unmodifiableList(pluginLifecycleComponents); - client.initialize(injector.getInstance(new Key>() {}), + client.initialize(injector.getInstance(new Key>() {}), () -> clusterService.localNode().getId(), transportService.getRemoteClusterService()); logger.debug("initializing HTTP handlers ..."); diff --git a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java index b7281e1026a63..1e0f83958bbac 100644 --- a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java @@ -48,7 +48,7 @@ * Action that is used by executor node to indicate that the persistent action finished or failed on the node and needs to be * removed from the cluster state in case of successful completion or restarted on some other node in case of failure. */ -public class CompletionPersistentTaskAction extends Action { +public class CompletionPersistentTaskAction extends Action { public static final CompletionPersistentTaskAction INSTANCE = new CompletionPersistentTaskAction(); public static final String NAME = "cluster:admin/persistent/completion"; diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java index d0c791e3df046..665a803a2d9cf 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java @@ -137,7 +137,7 @@ public void sendRemoveRequest(final String taskId, final ActionListener - void execute(final Req request, final Action action, final ActionListener> listener) { + void execute(final Req request, final Action action, final ActionListener> listener) { try { final ThreadContext threadContext = client.threadPool().getThreadContext(); final Supplier supplier = threadContext.newRestorableContext(false); diff --git a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java index f278b5bcc5e22..38890e6a12c12 100644 --- a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java @@ -42,7 +42,7 @@ import java.io.IOException; import java.util.Objects; -public class RemovePersistentTaskAction extends Action { +public class RemovePersistentTaskAction extends Action { public static final RemovePersistentTaskAction INSTANCE = new RemovePersistentTaskAction(); public static final String NAME = "cluster:admin/persistent/remove"; diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java index 6b338c2469717..eb71b7ad13641 100644 --- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java @@ -49,7 +49,7 @@ /** * This action can be used to add the record for the persistent action to the cluster state. 
*/ -public class StartPersistentTaskAction extends Action { +public class StartPersistentTaskAction extends Action { public static final StartPersistentTaskAction INSTANCE = new StartPersistentTaskAction(); public static final String NAME = "cluster:admin/persistent/start"; diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java index a639e4bde5360..040db78aa450b 100644 --- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java @@ -44,7 +44,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class UpdatePersistentTaskStatusAction extends Action { +public class UpdatePersistentTaskStatusAction extends Action { public static final UpdatePersistentTaskStatusAction INSTANCE = new UpdatePersistentTaskStatusAction(); public static final String NAME = "cluster:admin/persistent/update_status"; diff --git a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java index 7454d74349ea6..eb8b7130d7054 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.TransportActions; @@ -65,10 +65,10 @@ public interface ActionPlugin { } /** - * Client actions added by this plugin. This defaults to all of the {@linkplain GenericAction} in + * Client actions added by this plugin. This defaults to all of the {@linkplain Action} in * {@linkplain ActionPlugin#getActions()}. */ - default List getClientActions() { + default List getClientActions() { return getActions().stream().map(a -> a.action).collect(Collectors.toList()); } @@ -111,7 +111,7 @@ default UnaryOperator getRestHandlerWrapper(ThreadContext threadCon } final class ActionHandler { - private final GenericAction action; + private final Action action; private final Class> transportAction; private final Class[] supportTransportActions; @@ -119,14 +119,14 @@ final class ActionHandler action, Class> transportAction, - Class... supportTransportActions) { + public ActionHandler(Action action, Class> transportAction, + Class... 
supportTransportActions) { this.action = action; this.transportAction = transportAction; this.supportTransportActions = supportTransportActions; } - public GenericAction getAction() { + public Action getAction() { return action; } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java index 70a07db12475f..aca8cdccaddb8 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java @@ -45,7 +45,7 @@ final class RemoteClusterAwareClient extends AbstractClient { @Override protected > - void doExecute(Action action, Request request, ActionListener listener) { + void doExecute(Action action, Request request, ActionListener listener) { remoteClusterService.ensureConnected(clusterAlias, ActionListener.wrap(res -> { Transport.Connection connection = remoteClusterService.getConnection(clusterAlias); service.sendRequest(connection, action.name(), request, TransportRequestOptions.EMPTY, diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index a766a3e3c5635..f79cb02d83aa5 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -88,7 +88,7 @@ protected FakeTransportAction(Settings settings, String actionName, ThreadPool t protected void doExecute(FakeRequest request, ActionListener listener) { } } - class FakeAction extends GenericAction { + class FakeAction extends Action { protected FakeAction() { super("fake"); } diff --git a/server/src/test/java/org/elasticsearch/action/GenericActionTests.java b/server/src/test/java/org/elasticsearch/action/ActionTests.java similarity index 93% rename from server/src/test/java/org/elasticsearch/action/GenericActionTests.java rename to server/src/test/java/org/elasticsearch/action/ActionTests.java index 1bbff4b2a99b5..c159d36ca9158 100644 --- a/server/src/test/java/org/elasticsearch/action/GenericActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionTests.java @@ -21,10 +21,10 @@ import org.elasticsearch.test.ESTestCase; -public class GenericActionTests extends ESTestCase { +public class ActionTests extends ESTestCase { public void testEquals() { - class FakeAction extends GenericAction { + class FakeAction extends Action { protected FakeAction(String name) { super(name); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index a98c9088b8dc0..bac8f4a8730da 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -45,7 +45,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugins.ActionPlugin; @@ -323,7 +322,7 @@ protected NodeResponse nodeOperation(NodeRequest request) { } - public static class TestTaskAction 
extends Action { + public static class TestTaskAction extends Action { public static final TestTaskAction INSTANCE = new TestTaskAction(); public static final String NAME = "cluster:admin/tasks/test"; @@ -340,7 +339,7 @@ public NodesResponse newResponse() { public static class NodesRequestBuilder extends NodesOperationRequestBuilder { - protected NodesRequestBuilder(ElasticsearchClient client, Action action) { + protected NodesRequestBuilder(ElasticsearchClient client, Action action) { super(client, action, new NodesRequest("test")); } @@ -454,7 +453,7 @@ protected void taskOperation(UnblockTestTasksRequest request, Task task, ActionL } - public static class UnblockTestTasksAction extends Action { + public static class UnblockTestTasksAction extends Action { public static final UnblockTestTasksAction INSTANCE = new UnblockTestTasksAction(); public static final String NAME = "cluster:admin/tasks/testunblock"; @@ -472,7 +471,7 @@ public UnblockTestTasksResponse newResponse() { public static class UnblockTestTasksRequestBuilder extends ActionRequestBuilder { protected UnblockTestTasksRequestBuilder(ElasticsearchClient client, - Action action) { + Action action) { super(client, action, new UnblockTestTasksRequest()); } } diff --git a/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index db9f9d83c816a..5dea451dbacfd 100644 --- a/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -21,7 +21,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; @@ -56,7 +56,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { .put(ThreadContext.PREFIX + ".key2", "val 2") .build(); - private static final GenericAction[] ACTIONS = new GenericAction[] { + private static final Action[] ACTIONS = new Action[] { // client actions GetAction.INSTANCE, SearchAction.INSTANCE, DeleteAction.INSTANCE, DeleteStoredScriptAction.INSTANCE, IndexAction.INSTANCE, @@ -92,7 +92,7 @@ public void tearDown() throws Exception { terminate(threadPool); } - protected abstract Client buildClient(Settings headersSettings, GenericAction[] testedActions); + protected abstract Client buildClient(Settings headersSettings, Action[] testedActions); public void testActions() { diff --git a/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java b/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java index 00b293bcffb34..bff713a225482 100644 --- a/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java +++ b/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java @@ -41,8 +41,8 @@ public void testSetsParentId() { protected < Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder - > void doExecute( Action action, Request request, - ActionListener listener) { + > void doExecute(Action action, Request request, + ActionListener listener) { assertEquals(parentTaskId[0], 
request.getParentTask()); super.doExecute(action, request, listener); } diff --git a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index 5e739cc325040..dc1f146b452de 100644 --- a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; @@ -39,7 +39,7 @@ public class NodeClientHeadersTests extends AbstractClientHeadersTestCase { private static final ActionFilters EMPTY_FILTERS = new ActionFilters(Collections.emptySet()); @Override - protected Client buildClient(Settings headersSettings, GenericAction[] testedActions) { + protected Client buildClient(Settings headersSettings, Action[] testedActions) { Settings settings = HEADER_SETTINGS; Actions actions = new Actions(settings, threadPool, testedActions); NodeClient client = new NodeClient(settings, threadPool); @@ -47,10 +47,10 @@ protected Client buildClient(Settings headersSettings, GenericAction[] testedAct return client; } - private static class Actions extends HashMap { + private static class Actions extends HashMap { - private Actions(Settings settings, ThreadPool threadPool, GenericAction[] actions) { - for (GenericAction action : actions) { + private Actions(Settings settings, ThreadPool threadPool, Action[] actions) { + for (Action action : actions) { put(action, new InternalTransportAction(settings, action.name(), threadPool)); } } diff --git a/server/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java index b9ba6aa0050e4..82806938a0b32 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.client.transport; import org.elasticsearch.Version; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.admin.cluster.node.liveness.LivenessResponse; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; @@ -76,7 +76,7 @@ public void tearDown() throws Exception { } @Override - protected Client buildClient(Settings headersSettings, GenericAction[] testedActions) { + protected Client buildClient(Settings headersSettings, Action[] testedActions) { transportService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null); transportService.start(); transportService.acceptIncomingRequests(); diff --git a/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index 8093e7d38a14d..069f965ac6b3b 100644 --- a/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ 
-162,7 +162,7 @@ public List> getSettings() { } public static class UpdateInternalIndexAction - extends Action { + extends Action { private static final UpdateInternalIndexAction INSTANCE = new UpdateInternalIndexAction(); private static final String NAME = "indices:admin/settings/update-internal-index"; diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java index 063a861b5c315..ddcda1058039c 100644 --- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java +++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java @@ -383,7 +383,7 @@ protected AllocatedPersistentTask createTask(long id, String type, String action } } - public static class TestTaskAction extends Action { + public static class TestTaskAction extends Action { public static final TestTaskAction INSTANCE = new TestTaskAction(); public static final String NAME = "cluster:admin/persistent/task_test"; diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java index 93cb3475a1249..8ef08a259821a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java @@ -54,7 +54,7 @@ public NoOpClient(String testName) { protected > - void doExecute(Action action, Request request, ActionListener listener) { + void doExecute(Action action, Request request, ActionListener listener) { listener.onResponse(null); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java index 309246ca9d760..a2c8d609be084 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class DeleteLicenseAction extends Action { +public class DeleteLicenseAction extends Action { public static final DeleteLicenseAction INSTANCE = new DeleteLicenseAction(); public static final String NAME = "cluster:admin/xpack/license/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java index 75d5c2da10680..be97ff59172fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class GetBasicStatusAction extends Action { +public class GetBasicStatusAction extends Action { public static final GetBasicStatusAction INSTANCE = new GetBasicStatusAction(); public static final String NAME = "cluster:admin/xpack/license/basic_status"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java index be8d46e31d0b3..a6f19ea95b1e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java @@ -7,7 +7,7 @@ import 
org.elasticsearch.action.Action; -public class GetLicenseAction extends Action { +public class GetLicenseAction extends Action { public static final GetLicenseAction INSTANCE = new GetLicenseAction(); public static final String NAME = "cluster:monitor/xpack/license/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java index 619300b1d7791..69c14e1b6dc83 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class GetTrialStatusAction extends Action { +public class GetTrialStatusAction extends Action { public static final GetTrialStatusAction INSTANCE = new GetTrialStatusAction(); public static final String NAME = "cluster:admin/xpack/license/trial_status"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java index 747632d9d1d8a..864bfa2b7780f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class PostStartBasicAction extends Action { +public class PostStartBasicAction extends Action { public static final PostStartBasicAction INSTANCE = new PostStartBasicAction(); public static final String NAME = "cluster:admin/xpack/license/start_basic"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java index c7817f73b91cb..609fa42caabc2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class PostStartTrialAction extends Action { +public class PostStartTrialAction extends Action { public static final PostStartTrialAction INSTANCE = new PostStartTrialAction(); public static final String NAME = "cluster:admin/xpack/license/start_trial"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java index b96f13190eddb..d93957a9d8bae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class PutLicenseAction extends Action { +public class PutLicenseAction extends Action { public static final PutLicenseAction INSTANCE = new PutLicenseAction(); public static final String NAME = "cluster:admin/xpack/license/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index 8b6d72aac3ce8..ff3091bde93b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -81,8 +81,8 @@ public 
static v */ public static > void executeAsyncWithOrigin( - Client client, String origin, Action action, Request request, - ActionListener listener) { + Client client, String origin, Action action, Request request, + ActionListener listener) { final ThreadContext threadContext = client.threadPool().getThreadContext(); final Supplier supplier = threadContext.newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, origin)) { @@ -140,8 +140,8 @@ public static T executeWithHeaders(Map> void executeWithHeadersAsync( - Map headers, String origin, Client client, Action action, Request request, - ActionListener listener) { + Map headers, String origin, Client client, Action action, Request request, + ActionListener listener) { Map filteredHeaders = headers.entrySet().stream().filter(e -> SECURITY_HEADER_FILTERS.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); @@ -178,7 +178,7 @@ private ClientWithOrigin(Client in, String origin) { @Override protected > void doExecute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { final Supplier supplier = in().threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = in().threadPool().getThreadContext().stashContext()) { in().threadPool().getThreadContext().putTransient(ACTION_ORIGIN_TRANSIENT_NAME, origin); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 049089e62cf26..2894138248b8c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.core; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.MetaData; @@ -204,7 +204,7 @@ static Settings additionalSettings(final Settings settings, final boolean enable } @Override - public List getClientActions() { + public List getClientActions() { return Arrays.asList( // deprecation DeprecationInfoAction.INSTANCE, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index db36aabf7ac6a..920081572cfc7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -11,7 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; @@ -261,8 +261,8 @@ public Collection createComponents(Client client, ClusterService cluster } @Override - public List getClientActions() { - List actions = new ArrayList<>(); + public List getClientActions() { + List actions = new ArrayList<>(); actions.addAll(licensing.getClientActions()); actions.addAll(super.getClientActions()); return actions; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java index 585153000a24b..8690973d7678e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java @@ -8,7 +8,7 @@ import org.elasticsearch.action.Action; import org.elasticsearch.license.XPackInfoResponse; -public class XPackInfoAction extends Action { +public class XPackInfoAction extends Action { public static final String NAME = "cluster:monitor/xpack/info"; public static final XPackInfoAction INSTANCE = new XPackInfoAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java index 906aaf3f4dada..40311a4e88457 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class XPackUsageAction extends Action { +public class XPackUsageAction extends Action { public static final String NAME = "cluster:monitor/xpack/usage"; public static final XPackUsageAction INSTANCE = new XPackUsageAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java index d3b7bd1452901..09c6a0d57524e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java @@ -37,7 +37,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class DeprecationInfoAction extends Action { +public class DeprecationInfoAction extends Action { public static final DeprecationInfoAction INSTANCE = new DeprecationInfoAction(); public static final String NAME = "cluster:admin/xpack/deprecation/info"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java index ebc8e1be32051..5503eb692558b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class GraphExploreAction extends Action { +public class GraphExploreAction extends Action { public static final GraphExploreAction INSTANCE = new GraphExploreAction(); public static final String NAME = "indices:data/read/xpack/graph/explore"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java index 3710f5b96f60c..019bad54a5d24 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java @@ -27,7 +27,7 @@ import 
java.io.IOException; import java.util.Objects; -public class CloseJobAction extends Action { +public class CloseJobAction extends Action { public static final CloseJobAction INSTANCE = new CloseJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/close"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java index 4cdbd03cf4752..a6e5f8da88cd7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java @@ -19,7 +19,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteCalendarAction extends Action { +public class DeleteCalendarAction extends Action { public static final DeleteCalendarAction INSTANCE = new DeleteCalendarAction(); public static final String NAME = "cluster:admin/xpack/ml/calendars/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java index 9eaf643be7688..24fc55f59ccb7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteCalendarEventAction extends Action { +public class DeleteCalendarEventAction extends Action { public static final DeleteCalendarEventAction INSTANCE = new DeleteCalendarEventAction(); public static final String NAME = "cluster:admin/xpack/ml/calendars/events/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java index 44580763601ed..fba0fe4cf1f6c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteDatafeedAction extends Action { +public class DeleteDatafeedAction extends Action { public static final DeleteDatafeedAction INSTANCE = new DeleteDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeeds/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java index 56361cd7ed2a5..271d8ad5fa33c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteExpiredDataAction extends Action { +public class DeleteExpiredDataAction extends Action { public static final DeleteExpiredDataAction INSTANCE = new DeleteExpiredDataAction(); public static final String NAME = "cluster:admin/xpack/ml/delete_expired_data"; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java index 8faa5e5876178..b271c6fbdf02e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java @@ -20,7 +20,7 @@ import java.util.Objects; -public class DeleteFilterAction extends Action { +public class DeleteFilterAction extends Action { public static final DeleteFilterAction INSTANCE = new DeleteFilterAction(); public static final String NAME = "cluster:admin/xpack/ml/filters/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java index bbc59594b586e..71e5f8fad71d7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java @@ -24,7 +24,7 @@ import java.util.Map; import java.util.Objects; -public class DeleteJobAction extends Action { +public class DeleteJobAction extends Action { public static final DeleteJobAction INSTANCE = new DeleteJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java index 70c3a90d4fad2..a80fbc7863825 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java @@ -19,7 +19,7 @@ import java.io.IOException; -public class DeleteModelSnapshotAction extends Action { +public class DeleteModelSnapshotAction extends Action { public static final DeleteModelSnapshotAction INSTANCE = new DeleteModelSnapshotAction(); public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java index 4532c422d25ea..558d25f62de32 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java @@ -16,7 +16,7 @@ import java.io.IOException; -public class FinalizeJobExecutionAction extends Action { +public class FinalizeJobExecutionAction extends Action { public static final FinalizeJobExecutionAction INSTANCE = new FinalizeJobExecutionAction(); public static final String NAME = "cluster:internal/xpack/ml/job/finalize_job_execution"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java index 206128a8784ca..ef086b5126228 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java @@ -26,7 +26,7 @@ import java.util.Date; import java.util.Objects; -public class FlushJobAction extends Action { +public class FlushJobAction extends Action { public static final FlushJobAction INSTANCE = new FlushJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/flush"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java index 75ba8f195370c..327941a2c055c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java @@ -24,7 +24,7 @@ import java.io.IOException; import java.util.Objects; -public class ForecastJobAction extends Action { +public class ForecastJobAction extends Action { public static final ForecastJobAction INSTANCE = new ForecastJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/forecast"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java index 6abbd2dfbdfd9..29b3d4bb8d557 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java @@ -30,7 +30,7 @@ import java.io.IOException; import java.util.Objects; -public class GetBucketsAction extends Action { +public class GetBucketsAction extends Action { public static final GetBucketsAction INSTANCE = new GetBucketsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/buckets/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java index 19cf114772d8a..6c707fcbdb726 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class GetCalendarEventsAction extends Action { +public class GetCalendarEventsAction extends Action { public static final GetCalendarEventsAction INSTANCE = new GetCalendarEventsAction(); public static final String NAME = "cluster:monitor/xpack/ml/calendars/events/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java index 60ab7adbbcedf..1fb945ef2422b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java @@ -29,7 +29,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetCalendarsAction extends Action { +public class GetCalendarsAction extends Action { public static final GetCalendarsAction INSTANCE = new GetCalendarsAction(); public static final String NAME = "cluster:monitor/xpack/ml/calendars/get"; diff 
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java index 3bb459013995d..4b4dcb5f79cc5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java @@ -29,7 +29,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetCategoriesAction extends Action { +public class GetCategoriesAction extends Action { public static final GetCategoriesAction INSTANCE = new GetCategoriesAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/categories/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java index 5bd6e96e79cf9..d75e03b87f20d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java @@ -25,7 +25,7 @@ import java.io.IOException; import java.util.Objects; -public class GetDatafeedsAction extends Action { +public class GetDatafeedsAction extends Action { public static final GetDatafeedsAction INSTANCE = new GetDatafeedsAction(); public static final String NAME = "cluster:monitor/xpack/ml/datafeeds/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java index 9dc2042d9cfd3..823c158d2a6e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java @@ -30,7 +30,7 @@ import java.util.Map; import java.util.Objects; -public class GetDatafeedsStatsAction extends Action { +public class GetDatafeedsStatsAction extends Action { public static final GetDatafeedsStatsAction INSTANCE = new GetDatafeedsStatsAction(); public static final String NAME = "cluster:monitor/xpack/ml/datafeeds/stats/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java index 541366248ee7c..f13f303396e08 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java @@ -27,7 +27,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetFiltersAction extends Action { +public class GetFiltersAction extends Action { public static final GetFiltersAction INSTANCE = new GetFiltersAction(); public static final String NAME = "cluster:admin/xpack/ml/filters/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java index 803bfda63ebc2..35f0675c6237a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java @@ -29,7 +29,7 @@ import java.util.Objects; public class GetInfluencersAction -extends Action { +extends Action { public static final GetInfluencersAction INSTANCE = new GetInfluencersAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/influencers/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java index 8382345773450..063efc7145271 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java @@ -25,7 +25,7 @@ import java.io.IOException; import java.util.Objects; -public class GetJobsAction extends Action { +public class GetJobsAction extends Action { public static final GetJobsAction INSTANCE = new GetJobsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java index c2c3c9b8d6c32..1ec9f0c473232 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java @@ -39,7 +39,7 @@ import java.util.Map; import java.util.Objects; -public class GetJobsStatsAction extends Action { +public class GetJobsStatsAction extends Action { public static final GetJobsStatsAction INSTANCE = new GetJobsStatsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/stats/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java index 3bd990577603b..c349fa6527be9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class GetModelSnapshotsAction extends Action { +public class GetModelSnapshotsAction extends Action { public static final GetModelSnapshotsAction INSTANCE = new GetModelSnapshotsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/model_snapshots/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java index 81dad665577c8..e6ace63f44a7f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java @@ -48,7 +48,7 @@ * the interval. *
    */ -public class GetOverallBucketsAction extends Action { +public class GetOverallBucketsAction extends Action { public static final GetOverallBucketsAction INSTANCE = new GetOverallBucketsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/overall_buckets/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java index 7d1fb839704af..cd76c54f45277 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class GetRecordsAction extends Action { +public class GetRecordsAction extends Action { public static final GetRecordsAction INSTANCE = new GetRecordsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/records/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java index 7bafe5056af67..451679f364600 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java @@ -35,7 +35,7 @@ * task ensures the current datafeed task can complete inconsequentially while * the datafeed persistent task may be stopped or reassigned on another node. */ -public class IsolateDatafeedAction extends Action { +public class IsolateDatafeedAction extends Action { public static final IsolateDatafeedAction INSTANCE = new IsolateDatafeedAction(); public static final String NAME = "cluster:internal/xpack/ml/datafeed/isolate"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java index 5edb988351b0e..96440ebe50306 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java @@ -16,7 +16,7 @@ import java.io.IOException; import java.util.Objects; -public class KillProcessAction extends Action { +public class KillProcessAction extends Action { public static final KillProcessAction INSTANCE = new KillProcessAction(); public static final String NAME = "cluster:internal/xpack/ml/job/kill/process"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java index 38544e576171b..b0d635202c9fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java @@ -21,7 +21,7 @@ import java.util.Map; import java.util.Objects; -public class MlInfoAction extends Action { +public class MlInfoAction extends Action { public static final MlInfoAction INSTANCE = new MlInfoAction(); public static final String NAME = "cluster:monitor/xpack/ml/info/get"; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java index 0ce24aafefa79..0c7380349bde1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java @@ -31,7 +31,7 @@ import java.io.IOException; import java.util.Objects; -public class OpenJobAction extends Action { +public class OpenJobAction extends Action { public static final OpenJobAction INSTANCE = new OpenJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/open"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java index f1d86dd784abe..12ebed924dbcc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java @@ -16,7 +16,7 @@ import java.io.IOException; import java.util.Objects; -public class PersistJobAction extends Action { +public class PersistJobAction extends Action { public static final PersistJobAction INSTANCE = new PersistJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/persist"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java index aa4fc9ee2ea44..beff26eb34d82 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java @@ -30,7 +30,7 @@ import java.util.Objects; import java.util.stream.Collectors; -public class PostCalendarEventsAction extends Action { +public class PostCalendarEventsAction extends Action { public static final PostCalendarEventsAction INSTANCE = new PostCalendarEventsAction(); public static final String NAME = "cluster:admin/xpack/ml/calendars/events/post"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java index 69dd7e69ca149..ccc745d9742ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java @@ -24,7 +24,7 @@ import java.io.IOException; import java.util.Objects; -public class PostDataAction extends Action { +public class PostDataAction extends Action { public static final PostDataAction INSTANCE = new PostDataAction(); public static final String NAME = "cluster:admin/xpack/ml/job/data/post"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java index 46eb6578e86ec..af8a99b9828bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java @@ -25,7 +25,7 @@ import 
java.io.InputStream; import java.util.Objects; -public class PreviewDatafeedAction extends Action { +public class PreviewDatafeedAction extends Action { public static final PreviewDatafeedAction INSTANCE = new PreviewDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeeds/preview"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java index ace5869dc508d..345c4f1a96db4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java @@ -28,7 +28,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class PutCalendarAction extends Action { +public class PutCalendarAction extends Action { public static final PutCalendarAction INSTANCE = new PutCalendarAction(); public static final String NAME = "cluster:admin/xpack/ml/calendars/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index 5142abac08622..a0c757a0be6a0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.Objects; -public class PutDatafeedAction extends Action { +public class PutDatafeedAction extends Action { public static final PutDatafeedAction INSTANCE = new PutDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeeds/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java index 8269a105b6463..0ed5e8f22aadb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java @@ -25,7 +25,7 @@ import java.util.Objects; -public class PutFilterAction extends Action { +public class PutFilterAction extends Action { public static final PutFilterAction INSTANCE = new PutFilterAction(); public static final String NAME = "cluster:admin/xpack/ml/filters/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java index a556a58d50380..7e85198d2143e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java @@ -25,7 +25,7 @@ import java.util.List; import java.util.Objects; -public class PutJobAction extends Action { +public class PutJobAction extends Action { public static final PutJobAction INSTANCE = new PutJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java index 
d5b14abd6f9c8..316598b6ab505 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java @@ -29,8 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class RevertModelSnapshotAction -extends Action { +public class RevertModelSnapshotAction extends Action { public static final RevertModelSnapshotAction INSTANCE = new RevertModelSnapshotAction(); public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/revert"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java index 9df1f2fbd2e9b..5c45d33e744d6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java @@ -34,7 +34,7 @@ import java.util.Objects; import java.util.function.LongSupplier; -public class StartDatafeedAction extends Action { +public class StartDatafeedAction extends Action { public static final ParseField START_TIME = new ParseField("start"); public static final ParseField END_TIME = new ParseField("end"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java index 4df27e1b984a2..0117225141085 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class StopDatafeedAction extends Action { +public class StopDatafeedAction extends Action { public static final StopDatafeedAction INSTANCE = new StopDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeed/stop"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java index dfe6499d2c4e8..e70a2e3189b01 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java @@ -18,7 +18,7 @@ import java.io.IOException; import java.util.Objects; -public class UpdateCalendarJobAction extends Action { +public class UpdateCalendarJobAction extends Action { public static final UpdateCalendarJobAction INSTANCE = new UpdateCalendarJobAction(); public static final String NAME = "cluster:admin/xpack/ml/calendars/jobs/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java index 8ed170b8603b2..6ba34efa839b5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.util.Objects; -public class 
UpdateDatafeedAction extends Action { +public class UpdateDatafeedAction extends Action { public static final UpdateDatafeedAction INSTANCE = new UpdateDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeeds/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java index 020d4c22ee31f..d4fe804c451af 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java @@ -24,7 +24,7 @@ import java.io.IOException; import java.util.Objects; -public class UpdateJobAction extends Action { +public class UpdateJobAction extends Action { public static final UpdateJobAction INSTANCE = new UpdateJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java index 9703fed25ef9f..1414719693f2c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class UpdateModelSnapshotAction extends Action { +public class UpdateModelSnapshotAction extends Action { public static final UpdateModelSnapshotAction INSTANCE = new UpdateModelSnapshotAction(); public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java index b99a0b7400782..00b1d67bfff69 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java @@ -24,7 +24,7 @@ import java.util.List; import java.util.Objects; -public class UpdateProcessAction extends Action { +public class UpdateProcessAction extends Action { public static final UpdateProcessAction INSTANCE = new UpdateProcessAction(); public static final String NAME = "cluster:internal/xpack/ml/job/update/process"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java index aba3d492e488c..0e807664d86fa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java @@ -21,7 +21,7 @@ import java.io.IOException; import java.util.Objects; -public class ValidateDetectorAction extends Action { +public class ValidateDetectorAction extends Action { public static final ValidateDetectorAction INSTANCE = new ValidateDetectorAction(); public static final String NAME = "cluster:admin/xpack/ml/job/validate/detector"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java index c4ef75417c241..00a8813c1f18b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.Objects; -public class ValidateJobConfigAction extends Action { +public class ValidateJobConfigAction extends Action { public static final ValidateJobConfigAction INSTANCE = new ValidateJobConfigAction(); public static final String NAME = "cluster:admin/xpack/ml/job/validate"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java index 05ea4b8ed2c6d..49fb085191e4e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class MonitoringBulkAction extends Action { +public class MonitoringBulkAction extends Action { public static final MonitoringBulkAction INSTANCE = new MonitoringBulkAction(); public static final String NAME = "cluster:admin/xpack/monitoring/bulk"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java index 71ad404d44aa1..e59c6738d86b1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteRollupJobAction extends Action { +public class DeleteRollupJobAction extends Action { public static final DeleteRollupJobAction INSTANCE = new DeleteRollupJobAction(); public static final String NAME = "cluster:admin/xpack/rollup/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java index 55f0d6139ae50..ea98c2f4628e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java @@ -28,7 +28,7 @@ import java.util.Map; import java.util.Objects; -public class GetRollupCapsAction extends Action { +public class GetRollupCapsAction extends Action { public static final GetRollupCapsAction INSTANCE = new GetRollupCapsAction(); public static final String NAME = "cluster:monitor/xpack/rollup/get/caps"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java index 5372773867034..d5a5e7a07fadf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java @@ -35,7 +35,7 @@ import java.util.List; import java.util.Objects; -public class GetRollupJobsAction extends Action { +public class GetRollupJobsAction extends Action { public static final GetRollupJobsAction INSTANCE = new GetRollupJobsAction(); public static final String NAME = "cluster:monitor/xpack/rollup/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java index 6283b295cdb16..9c3767d418856 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java @@ -25,7 +25,7 @@ import java.util.Map; import java.util.Objects; -public class PutRollupJobAction extends Action { +public class PutRollupJobAction extends Action { public static final PutRollupJobAction INSTANCE = new PutRollupJobAction(); public static final String NAME = "cluster:admin/xpack/rollup/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java index c79eacb80ae02..3980282321cc8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.ElasticsearchClient; -public class RollupSearchAction extends Action { +public class RollupSearchAction extends Action { public static final RollupSearchAction INSTANCE = new RollupSearchAction(); public static final String NAME = "indices:admin/xpack/rollup/search"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java index a3e4b4054e2fa..e3dcb1a882f9f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java @@ -25,7 +25,7 @@ import java.util.Collections; import java.util.Objects; -public class StartRollupJobAction extends Action { +public class StartRollupJobAction extends Action { public static final StartRollupJobAction INSTANCE = new StartRollupJobAction(); public static final String NAME = "cluster:admin/xpack/rollup/start"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java index 7b9b06f8ac891..eb48d640f21eb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java @@ -24,7 +24,7 @@ import java.util.Collections; import java.util.Objects; -public class StopRollupJobAction extends Action { +public class StopRollupJobAction extends Action { public static final StopRollupJobAction INSTANCE = new 
StopRollupJobAction(); public static final String NAME = "cluster:admin/xpack/rollup/stop"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java index a1c865a030651..7c3cd58a7f467 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class ClearRealmCacheAction extends Action { +public class ClearRealmCacheAction extends Action { public static final ClearRealmCacheAction INSTANCE = new ClearRealmCacheAction(); public static final String NAME = "cluster:admin/xpack/security/realm/cache/clear"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java index 7eebd671ab1df..096b5380181fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java @@ -10,7 +10,7 @@ /** * The action for clearing the cache used by native roles that are stored in an index. */ -public class ClearRolesCacheAction extends Action { +public class ClearRolesCacheAction extends Action { public static final ClearRolesCacheAction INSTANCE = new ClearRolesCacheAction(); public static final String NAME = "cluster:admin/xpack/security/roles/cache/clear"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java index 3261ea94f4515..6130f107fb726 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java @@ -10,7 +10,7 @@ /** * Action for deleting a role from the security index */ -public class DeleteRoleAction extends Action { +public class DeleteRoleAction extends Action { public static final DeleteRoleAction INSTANCE = new DeleteRoleAction(); public static final String NAME = "cluster:admin/xpack/security/role/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java index 3489c2493e762..53126440afb9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java @@ -10,7 +10,7 @@ /** * Action to retrieve a role from the security index */ -public class GetRolesAction extends Action { +public class GetRolesAction extends Action { public static final GetRolesAction INSTANCE = new GetRolesAction(); public static final String NAME = "cluster:admin/xpack/security/role/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java index a9aa2c8f29aec..8396625e262ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java @@ -10,7 +10,7 @@ /** * Action for adding a role to the security index */ -public class PutRoleAction extends Action { +public class PutRoleAction extends Action { public static final PutRoleAction INSTANCE = new PutRoleAction(); public static final String NAME = "cluster:admin/xpack/security/role/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java index 065be4638dde9..6057daf959531 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java @@ -11,7 +11,7 @@ * Action for deleting a role-mapping from the * org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ -public class DeleteRoleMappingAction extends Action { +public class DeleteRoleMappingAction extends Action { public static final DeleteRoleMappingAction INSTANCE = new DeleteRoleMappingAction(); public static final String NAME = "cluster:admin/xpack/security/role_mapping/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java index 12797ed4d2b0f..e1488bf70913e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java @@ -9,10 +9,10 @@ /** * Action to retrieve one or more role-mappings from X-Pack security - + * * see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ -public class GetRoleMappingsAction extends Action { +public class GetRoleMappingsAction extends Action { public static final GetRoleMappingsAction INSTANCE = new GetRoleMappingsAction(); public static final String NAME = "cluster:admin/xpack/security/role_mapping/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java index 4b9c2d542cce2..9c3068adf127f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java @@ -10,7 +10,7 @@ /** * Action for adding a role to the security index */ -public class PutRoleMappingAction extends Action { +public class PutRoleMappingAction extends Action { public static final PutRoleMappingAction INSTANCE = new PutRoleMappingAction(); public static final String NAME = "cluster:admin/xpack/security/role_mapping/put"; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java index 0cd2235f84384..fca733a3938a7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java @@ -10,7 +10,7 @@ /** * Action for authenticating using SAML assertions */ -public final class SamlAuthenticateAction extends Action { +public final class SamlAuthenticateAction extends Action { public static final String NAME = "cluster:admin/xpack/security/saml/authenticate"; public static final SamlAuthenticateAction INSTANCE = new SamlAuthenticateAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java index 51ce0f00ee66b..dc5aa09627564 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java @@ -10,7 +10,7 @@ /** * Action to perform IdP-initiated logout for a SAML-SSO user */ -public final class SamlInvalidateSessionAction extends Action { +public final class SamlInvalidateSessionAction extends Action { public static final String NAME = "cluster:admin/xpack/security/saml/invalidate"; public static final SamlInvalidateSessionAction INSTANCE = new SamlInvalidateSessionAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java index 8c2cb6f4599d6..9ea3a29ca4ad9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java @@ -10,7 +10,7 @@ /** * Action for initiating a logout process for a SAML-SSO user */ -public final class SamlLogoutAction extends Action { +public final class SamlLogoutAction extends Action { public static final String NAME = "cluster:admin/xpack/security/saml/logout"; public static final SamlLogoutAction INSTANCE = new SamlLogoutAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java index 10c2eb719983e..12ad23ca50199 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java @@ -10,7 +10,7 @@ /** * Action for initiating an authentication process using SAML assertions */ -public final class SamlPrepareAuthenticationAction extends Action { +public final class SamlPrepareAuthenticationAction extends Action { public static final String NAME = "cluster:admin/xpack/security/saml/prepare"; public static final SamlPrepareAuthenticationAction INSTANCE = new 
SamlPrepareAuthenticationAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java index 9f2e937151ced..7b913f594e582 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java @@ -10,7 +10,7 @@ /** * Action for creating a new token */ -public final class CreateTokenAction extends Action { +public final class CreateTokenAction extends Action { public static final String NAME = "cluster:admin/xpack/security/token/create"; public static final CreateTokenAction INSTANCE = new CreateTokenAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java index d94744dff373f..90790de7cd395 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java @@ -17,7 +17,7 @@ public final class CreateTokenRequestBuilder extends ActionRequestBuilder { - public CreateTokenRequestBuilder(ElasticsearchClient client, Action action) { + public CreateTokenRequestBuilder(ElasticsearchClient client, Action action) { super(client, action, new CreateTokenRequest()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java index eca864546b222..679ee0756f638 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java @@ -10,7 +10,7 @@ /** * Action for invalidating a given token */ -public final class InvalidateTokenAction extends Action { +public final class InvalidateTokenAction extends Action { public static final String NAME = "cluster:admin/xpack/security/token/invalidate"; public static final InvalidateTokenAction INSTANCE = new InvalidateTokenAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java index c847aa32898d3..3478af2ec00f2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public final class RefreshTokenAction extends Action { +public final class RefreshTokenAction extends Action { public static final String NAME = "cluster:admin/xpack/security/token/refresh"; public static final RefreshTokenAction INSTANCE = new RefreshTokenAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java index 05c53063eb16d..18cfe85c8cb0c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class AuthenticateAction extends Action { +public class AuthenticateAction extends Action { public static final String NAME = "cluster:admin/xpack/security/user/authenticate"; public static final AuthenticateAction INSTANCE = new AuthenticateAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java index 23bfff8d80124..d01717a64eadc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class ChangePasswordAction extends Action { +public class ChangePasswordAction extends Action { public static final ChangePasswordAction INSTANCE = new ChangePasswordAction(); public static final String NAME = "cluster:admin/xpack/security/user/change_password"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java index a62381b6ecc6b..78666759dc0a7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java @@ -10,7 +10,7 @@ /** * Action for deleting a native user. 
*/ -public class DeleteUserAction extends Action { +public class DeleteUserAction extends Action { public static final DeleteUserAction INSTANCE = new DeleteUserAction(); public static final String NAME = "cluster:admin/xpack/security/user/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java index 5c9142671f4b2..49532049ba908 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java @@ -10,7 +10,7 @@ /** * Action for retrieving a user from the security index */ -public class GetUsersAction extends Action { +public class GetUsersAction extends Action { public static final GetUsersAction INSTANCE = new GetUsersAction(); public static final String NAME = "cluster:admin/xpack/security/user/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java index 5db27db93ec91..30bb44a2c1c33 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java @@ -12,7 +12,7 @@ * This action is testing whether a user has the specified * {@link RoleDescriptor.IndicesPrivileges privileges} */ -public class HasPrivilegesAction extends Action { +public class HasPrivilegesAction extends Action { public static final HasPrivilegesAction INSTANCE = new HasPrivilegesAction(); public static final String NAME = "cluster:admin/xpack/security/user/has_privileges"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java index 6009f89e69f40..20bbde2366b5e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java @@ -10,7 +10,7 @@ /** * Action for putting (adding/updating) a native user. 
*/ -public class PutUserAction extends Action { +public class PutUserAction extends Action { public static final PutUserAction INSTANCE = new PutUserAction(); public static final String NAME = "cluster:admin/xpack/security/user/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java index ec010cc17a9b2..0368cdf7d7dbc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java @@ -10,7 +10,7 @@ /** * This action is for setting the enabled flag on a native or reserved user */ -public class SetEnabledAction extends Action { +public class SetEnabledAction extends Action { public static final SetEnabledAction INSTANCE = new SetEnabledAction(); public static final String NAME = "cluster:admin/xpack/security/user/set_enabled"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java index e4115887f66a9..4e1a84773db7d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java @@ -25,7 +25,7 @@ * Action to obtain information about X.509 (SSL/TLS) certificates that are being used by X-Pack. * The primary use case is for tracking the expiry dates of certificates. */ -public class GetCertificateInfoAction extends Action { +public class GetCertificateInfoAction extends Action { public static final GetCertificateInfoAction INSTANCE = new GetCertificateInfoAction(); public static final String NAME = "cluster:monitor/xpack/ssl/certificates/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java index 781cfe4d4d227..84a643ae72dae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java @@ -26,7 +26,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xpack.core.upgrade.IndexUpgradeServiceFields.UPGRADE_INDEX_OPTIONS; -public class IndexUpgradeAction extends Action { +public class IndexUpgradeAction extends Action { public static final IndexUpgradeAction INSTANCE = new IndexUpgradeAction(); public static final String NAME = "cluster:admin/xpack/upgrade"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java index bfa6de10b1b28..f17dfbdb90b9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java @@ -26,7 +26,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class 
IndexUpgradeInfoAction extends Action { +public class IndexUpgradeInfoAction extends Action { public static final IndexUpgradeInfoAction INSTANCE = new IndexUpgradeInfoAction(); public static final String NAME = "cluster:admin/xpack/upgrade/info"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java index d6a7259087462..04ec95a369af2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java @@ -10,7 +10,7 @@ /** * This action acks a watch in memory, and the index */ -public class AckWatchAction extends Action { +public class AckWatchAction extends Action { public static final AckWatchAction INSTANCE = new AckWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/ack"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java index 8f6f10ced9097..936a21711547e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java @@ -10,7 +10,7 @@ /** * This action acks a watch in memory, and the index */ -public class ActivateWatchAction extends Action { +public class ActivateWatchAction extends Action { public static final ActivateWatchAction INSTANCE = new ActivateWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/activate"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java index e40d6876f1c9f..8a16755a6dbce 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java @@ -10,7 +10,7 @@ /** * This action deletes an watch from in memory, the scheduler and the index */ -public class DeleteWatchAction extends Action { +public class DeleteWatchAction extends Action { public static final DeleteWatchAction INSTANCE = new DeleteWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java index 5baa021d6f170..924f170959426 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java @@ -11,7 +11,7 @@ * This action executes a watch, either ignoring the schedule and condition or just the schedule and can execute a subset of 
the actions, * optionally persisting the history entry */ -public class ExecuteWatchAction extends Action { +public class ExecuteWatchAction extends Action { public static final ExecuteWatchAction INSTANCE = new ExecuteWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/execute"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java index c411c0dbeb3f9..4df72a964b65f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java @@ -8,7 +8,7 @@ /** * This action gets an watch by name */ -public class GetWatchAction extends org.elasticsearch.action.Action { +public class GetWatchAction extends org.elasticsearch.action.Action { public static final GetWatchAction INSTANCE = new GetWatchAction(); public static final String NAME = "cluster:monitor/xpack/watcher/watch/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java index faf2faae182f9..56cedc457bda7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java @@ -10,7 +10,7 @@ /** * This action puts an watch into the watch index and adds it to the scheduler */ -public class PutWatchAction extends Action { +public class PutWatchAction extends Action { public static final PutWatchAction INSTANCE = new PutWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java index a9682b2946d9a..0846bd10a80ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java @@ -8,7 +8,7 @@ import org.elasticsearch.action.Action; -public class WatcherServiceAction extends Action { +public class WatcherServiceAction extends Action { public static final WatcherServiceAction INSTANCE = new WatcherServiceAction(); public static final String NAME = "cluster:admin/xpack/watcher/service"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java index 330b6ace97797..59fcff090f59e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java @@ -10,7 +10,7 @@ /** * This Action gets the stats for the watcher plugin */ -public class WatcherStatsAction 
extends Action { +public class WatcherStatsAction extends Action { public static final WatcherStatsAction INSTANCE = new WatcherStatsAction(); public static final String NAME = "cluster:monitor/xpack/watcher/stats/dist"; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java index 672f4507dc814..2310afe4f77e7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java @@ -71,7 +71,7 @@ public String getName() { protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client)throws IOException { try (XContentParser parser = request.contentParser()) { final CreateTokenRequest tokenRequest = PARSER.parse(parser, null); - final Action action = + final Action action = "refresh_token".equals(tokenRequest.getGrantType()) ? RefreshTokenAction.INSTANCE : CreateTokenAction.INSTANCE; return channel -> client.execute(action, tokenRequest, // this doesn't use the RestBuilderListener since we need to override the diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 15361d997a161..76d888d2c2e61 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -127,7 +127,7 @@ public void setup() throws Exception { protected > - void doExecute(Action action, Request request, ActionListener listener) { + void doExecute(Action action, Request request, ActionListener listener) { if (IndexAction.NAME.equals(action.name())) { assertThat(request, instanceOf(IndexRequest.class)); IndexRequest indexRequest = (IndexRequest) request; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java index 1a5adc2e5ef44..c17134093c593 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java @@ -72,7 +72,7 @@ class IClient extends FilterClient { @Override protected > void doExecute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { clientCalled.set(true); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index ee570dcadda31..3d739d57f480c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ 
-77,7 +77,7 @@ public void setupMocks() { Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder > void doExecute( - Action action, + Action action, Request request, ActionListener listener) { requests.add(new Tuple<>(request, listener)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 7754d387f1527..928c9bbd1b143 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -66,7 +66,7 @@ public class SecurityIndexManagerTests extends ESTestCase { public static final String INDEX_NAME = ".security"; private static final String TEMPLATE_NAME = "SecurityIndexManagerTests-template"; private SecurityIndexManager manager; - private Map, Map>> actions; + private Map, Map>> actions; @Before public void setUpManager() { @@ -83,7 +83,7 @@ public void setUpManager() { protected > - void doExecute(Action action, Request request, + void doExecute(Action action, Request request, ActionListener listener) { final Map> map = actions.getOrDefault(action, new HashMap<>()); map.put(request, listener); diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java index 2c6daf2c4aef5..0908af76bebee 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class SqlClearCursorAction extends Action { +public class SqlClearCursorAction extends Action { public static final SqlClearCursorAction INSTANCE = new SqlClearCursorAction(); public static final String NAME = "indices:data/read/sql/close_cursor"; diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java index 82b233968d981..5b9a5b1c3ef47 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class SqlQueryAction extends Action { +public class SqlQueryAction extends Action { public static final SqlQueryAction INSTANCE = new SqlQueryAction(); public static final String NAME = "indices:data/read/sql"; diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java index 3d44d26264eb3..978a11fbbb645 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java @@ -10,7 +10,7 @@ /** * Sql action for translating SQL queries into ES requests */ -public class SqlTranslateAction extends Action { +public class SqlTranslateAction 
extends Action { public static final SqlTranslateAction INSTANCE = new SqlTranslateAction(); public static final String NAME = "indices:data/read/sql/translate"; From 04e4e44409d8fe2d98793f8f137892e39a113c1f Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Tue, 19 Jun 2018 14:21:11 +0200 Subject: [PATCH 26/92] Add get stored script and delete stored script to high level REST API (#31355) Add get stored script and delete stored script to high level REST API Relates to #27205 --- .../client/RequestConverters.java | 19 ++ .../client/RestHighLevelClient.java | 60 ++++++ .../client/RequestConvertersTests.java | 28 +++ .../elasticsearch/client/StoredScriptsIT.java | 105 +++++++++ .../StoredScriptsDocumentationIT.java | 204 ++++++++++++++++++ .../high-level/script/delete_script.asciidoc | 81 +++++++ .../high-level/script/get_script.asciidoc | 77 +++++++ .../high-level/supported-apis.asciidoc | 11 + .../rest-api-spec/api/get_script.json | 4 + .../DeleteStoredScriptResponse.java | 5 + .../GetStoredScriptResponse.java | 84 +++++++- .../TransportGetStoredScriptAction.java | 2 +- .../cluster/RestGetStoredScriptAction.java | 42 +--- .../script/StoredScriptSource.java | 6 +- .../DeleteStoredScriptResponseTests.java | 46 ++++ .../GetStoredScriptResponseTests.java | 61 ++++++ 16 files changed, 789 insertions(+), 46 deletions(-) create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java create mode 100644 docs/java-rest/high-level/script/delete_script.asciidoc create mode 100644 docs/java-rest/high-level/script/get_script.asciidoc create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponseTests.java create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index ab85af9f1fd7e..9d4582494eb91 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; @@ -892,6 +894,23 @@ static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) t return request; } + static Request getScript(GetStoredScriptRequest getStoredScriptRequest) { + String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + Params params = new Params(request); + 
params.withMasterTimeout(getStoredScriptRequest.masterNodeTimeout()); + return request; + } + + static Request deleteScript(DeleteStoredScriptRequest deleteStoredScriptRequest) { + String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(deleteStoredScriptRequest.id()).build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + Params params = new Params(request); + params.withTimeout(deleteStoredScriptRequest.timeout()); + params.withMasterTimeout(deleteStoredScriptRequest.masterNodeTimeout()); + return request; + } + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 536b85925a4ba..6905cfdb8f714 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -26,6 +26,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; @@ -652,6 +656,62 @@ public final FieldCapabilitiesResponse fieldCaps(FieldCapabilitiesRequest fieldC FieldCapabilitiesResponse::fromXContent, emptySet()); } + /** + * Get stored script by id. + * See + * How to use scripts on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public GetStoredScriptResponse getScript(GetStoredScriptRequest request, RequestOptions options) throws IOException { + return performRequestAndParseEntity(request, RequestConverters::getScript, options, + GetStoredScriptResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously get stored script by id. + * See + * How to use scripts on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void getScriptAsync(GetStoredScriptRequest request, RequestOptions options, + ActionListener listener) { + performRequestAsyncAndParseEntity(request, RequestConverters::getScript, options, + GetStoredScriptResponse::fromXContent, listener, emptySet()); + } + + /** + * Delete stored script by id. + * See + * How to use scripts on elastic.co + * @param request the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public DeleteStoredScriptResponse deleteScript(DeleteStoredScriptRequest request, RequestOptions options) throws IOException { + return performRequestAndParseEntity(request, RequestConverters::deleteScript, options, + DeleteStoredScriptResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously delete stored script by id. + * See + * How to use scripts on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void deleteScriptAsync(DeleteStoredScriptRequest request, RequestOptions options, + ActionListener listener) { + performRequestAsyncAndParseEntity(request, RequestConverters::deleteScript, options, + DeleteStoredScriptResponse::fromXContent, listener, emptySet()); + } + /** * Asynchronously executes a request using the Field Capabilities API. * See Field Capabilities API diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 60f427b490462..e7d56a4332b82 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; @@ -1948,6 +1950,32 @@ public void testGetTemplateRequest() throws Exception { assertThat(request.getEntity(), nullValue()); } + public void testGetScriptRequest() { + GetStoredScriptRequest getStoredScriptRequest = new GetStoredScriptRequest("x-script"); + Map expectedParams = new HashMap<>(); + setRandomMasterTimeout(getStoredScriptRequest, expectedParams); + + Request request = RequestConverters.getScript(getStoredScriptRequest); + assertThat(request.getEndpoint(), equalTo("/_scripts/" + getStoredScriptRequest.id())); + assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(request.getParameters(), equalTo(expectedParams)); + assertThat(request.getEntity(), nullValue()); + } + + public void testDeleteScriptRequest() { + DeleteStoredScriptRequest deleteStoredScriptRequest = new DeleteStoredScriptRequest("x-script"); + + Map expectedParams = new HashMap<>(); + setRandomTimeout(deleteStoredScriptRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + setRandomMasterTimeout(deleteStoredScriptRequest, expectedParams); + + Request request = RequestConverters.deleteScript(deleteStoredScriptRequest); + assertThat(request.getEndpoint(), equalTo("/_scripts/" + deleteStoredScriptRequest.id())); 
+ assertThat(request.getMethod(), equalTo(HttpDelete.METHOD_NAME)); + assertThat(request.getParameters(), equalTo(expectedParams)); + assertThat(request.getEntity(), nullValue()); + } + private static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException { BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, REQUEST_BODY_CONTENT_TYPE, false); assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java new file mode 100644 index 0000000000000..e6d380a4cc0e1 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java @@ -0,0 +1,105 @@ +package org.elasticsearch.client;/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.StoredScriptSource; + +import java.util.Collections; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + +public class StoredScriptsIT extends ESRestHighLevelClientTestCase { + + final String id = "calculate-score"; + + public void testGetStoredScript() throws Exception { + final StoredScriptSource scriptSource = + new StoredScriptSource("painless", + "Math.log(_score * 2) + params.my_modifier", + Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); + + final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); + // TODO: change to HighLevel PutStoredScriptRequest when it will be ready + // so far - using low-level REST API + Response putResponse = + adminClient() + .performRequest("PUT", "/_scripts/calculate-score", emptyMap(), + new StringEntity("{\"script\":" + script + "}", + ContentType.APPLICATION_JSON)); + 
assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode()); + assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity())); + + GetStoredScriptRequest getRequest = new GetStoredScriptRequest("calculate-score"); + getRequest.masterNodeTimeout("50s"); + + GetStoredScriptResponse getResponse = execute(getRequest, highLevelClient()::getScript, + highLevelClient()::getScriptAsync); + + assertThat(getResponse.getSource(), equalTo(scriptSource)); + } + + public void testDeleteStoredScript() throws Exception { + final StoredScriptSource scriptSource = + new StoredScriptSource("painless", + "Math.log(_score * 2) + params.my_modifier", + Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); + + final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); + // TODO: change to HighLevel PutStoredScriptRequest when it will be ready + // so far - using low-level REST API + Response putResponse = + adminClient() + .performRequest("PUT", "/_scripts/" + id, emptyMap(), + new StringEntity("{\"script\":" + script + "}", + ContentType.APPLICATION_JSON)); + assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode()); + assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity())); + + DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest(id); + deleteRequest.masterNodeTimeout("50s"); + deleteRequest.timeout("50s"); + + DeleteStoredScriptResponse deleteResponse = execute(deleteRequest, highLevelClient()::deleteScript, + highLevelClient()::deleteScriptAsync); + + assertThat(deleteResponse.isAcknowledged(), equalTo(true)); + + GetStoredScriptRequest getRequest = new GetStoredScriptRequest(id); + + final ElasticsearchStatusException statusException = expectThrows(ElasticsearchStatusException.class, + () -> execute(getRequest, highLevelClient()::getScript, + highLevelClient()::getScriptAsync)); + assertThat(statusException.status(), equalTo(RestStatus.NOT_FOUND)); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java new file mode 100644 index 0000000000000..0aadae73ce66d --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java @@ -0,0 +1,204 @@ +package org.elasticsearch.client.documentation;/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.StoredScriptSource; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + +/** + * This class is used to generate the Java Stored Scripts API documentation. + * You need to wrap your code between two tags like: + * // tag::example + * // end::example + * + * Where example is your tag name. + * + * Then in the documentation, you can extract what is between tag and end tags with + * ["source","java",subs="attributes,callouts,macros"] + * -------------------------------------------------- + * include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[example] + * -------------------------------------------------- + * + * The column width of the code block is 84. If the code contains a line longer + * than 84, the line will be cut and a horizontal scroll bar will be displayed. 
+ * (the code indentation of the tag is not included in the width) + */ +public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase { + + public void testGetStoredScript() throws Exception { + RestHighLevelClient client = highLevelClient(); + + final StoredScriptSource scriptSource = + new StoredScriptSource("painless", + "Math.log(_score * 2) + params.my_modifier", + Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); + + putStoredScript("calculate-score", scriptSource); + + { + // tag::get-stored-script-request + GetStoredScriptRequest request = new GetStoredScriptRequest("calculate-score"); // <1> + // end::get-stored-script-request + + // tag::get-stored-script-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueSeconds(50)); // <1> + request.masterNodeTimeout("50s"); // <2> + // end::get-stored-script-request-masterTimeout + + // tag::get-stored-script-execute + GetStoredScriptResponse getResponse = client.getScript(request, RequestOptions.DEFAULT); + // end::get-stored-script-execute + + // tag::get-stored-script-response + StoredScriptSource storedScriptSource = getResponse.getSource(); // <1> + + String lang = storedScriptSource.getLang(); // <2> + String source = storedScriptSource.getSource(); // <3> + Map options = storedScriptSource.getOptions(); // <4> + // end::get-stored-script-response + + assertThat(storedScriptSource, equalTo(scriptSource)); + + // tag::get-stored-script-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetStoredScriptResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-stored-script-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::get-stored-script-execute-async + client.getScriptAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::get-stored-script-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + } + + public void testDeleteStoredScript() throws Exception { + RestHighLevelClient client = highLevelClient(); + + final StoredScriptSource scriptSource = + new StoredScriptSource("painless", + "Math.log(_score * 2) + params.my_modifier", + Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); + + putStoredScript("calculate-score", scriptSource); + + // tag::delete-stored-script-request + DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest("calculate-score"); // <1> + // end::delete-stored-script-request + + // tag::delete-stored-script-request-masterTimeout + deleteRequest.masterNodeTimeout(TimeValue.timeValueSeconds(50)); // <1> + deleteRequest.masterNodeTimeout("50s"); // <2> + // end::delete-stored-script-request-masterTimeout + + // tag::delete-stored-script-request-timeout + deleteRequest.timeout(TimeValue.timeValueSeconds(60)); // <1> + deleteRequest.timeout("60s"); // <2> + // end::delete-stored-script-request-timeout + + // tag::delete-stored-script-execute + DeleteStoredScriptResponse deleteResponse = client.deleteScript(deleteRequest, RequestOptions.DEFAULT); + // end::delete-stored-script-execute + + // tag::delete-stored-script-response + boolean acknowledged = deleteResponse.isAcknowledged();// <1> + // end::delete-stored-script-response + + putStoredScript("calculate-score", scriptSource); + + // 
tag::delete-stored-script-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(DeleteStoredScriptResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::delete-stored-script-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::delete-stored-script-execute-async + client.deleteScriptAsync(deleteRequest, RequestOptions.DEFAULT, listener); // <1> + // end::delete-stored-script-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + private void putStoredScript(String id, StoredScriptSource scriptSource) throws IOException { + final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); + // TODO: change to HighLevel PutStoredScriptRequest when it will be ready + // so far - using low-level REST API + Response putResponse = + adminClient() + .performRequest("PUT", "/_scripts/" + id, emptyMap(), + new StringEntity("{\"script\":" + script + "}", + ContentType.APPLICATION_JSON)); + assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode()); + assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity())); + } +} diff --git a/docs/java-rest/high-level/script/delete_script.asciidoc b/docs/java-rest/high-level/script/delete_script.asciidoc new file mode 100644 index 0000000000000..79b3b0b324715 --- /dev/null +++ b/docs/java-rest/high-level/script/delete_script.asciidoc @@ -0,0 +1,81 @@ +[[java-rest-high-delete-stored-script]] + +=== Delete Stored Script API + +[[java-rest-high-delete-stored-script-request]] +==== Delete Stored Script Request + +A `DeleteStoredScriptRequest` requires an `id`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-request] +-------------------------------------------------- +<1> The id of the script + +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-request-timeout] +-------------------------------------------------- +<1> Timeout to wait for the all the nodes to acknowledge the stored script is deleted as a `TimeValue` +<2> Timeout to wait for the all the nodes to acknowledge the stored script is deleted as a `String` + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue` +<2> Timeout to connect to the master node as a `String` + +[[java-rest-high-delete-stored-script-sync]] +==== Synchronous Execution +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-execute] +-------------------------------------------------- + +[[java-rest-high-delete-stored-script-async]] +==== Asynchronous Execution + +The asynchronous execution of 
a delete stored script request requires both the `DeleteStoredScriptRequest` +instance and an `ActionListener` instance to be passed to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-execute-async] +-------------------------------------------------- +<1> The `DeleteStoredScriptRequest` to execute and the `ActionListener` to use when +the execution completes + +[[java-rest-high-delete-stored-script-listener]] +===== Action Listener + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `DeleteStoredScriptResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument + +[[java-rest-high-delete-stored-script-response]] +==== Delete Stored Script Response + +The returned `DeleteStoredScriptResponse` allows to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-response] +-------------------------------------------------- +<1> Indicates whether all of the nodes have acknowledged the request \ No newline at end of file diff --git a/docs/java-rest/high-level/script/get_script.asciidoc b/docs/java-rest/high-level/script/get_script.asciidoc new file mode 100644 index 0000000000000..a38bdad2bd6af --- /dev/null +++ b/docs/java-rest/high-level/script/get_script.asciidoc @@ -0,0 +1,77 @@ +[[java-rest-high-get-stored-script]] + +=== Get Stored Script API + +[[java-rest-high-get-stored-script-request]] +==== Get Stored Script Request + +A `GetStoredScriptRequest` requires an `id`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-request] +-------------------------------------------------- +<1> The id of the script + +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue` +<2> Timeout to connect to the master node as a `String` + +[[java-rest-high-get-stored-script-sync]] +==== Synchronous Execution +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-execute] +-------------------------------------------------- + +[[java-rest-high-get-stored-script-async]] +==== Asynchronous Execution + +The asynchronous execution of a get stored 
script request requires both the `GetStoredScriptRequest` +instance and an `ActionListener` instance to be passed to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-execute-async] +-------------------------------------------------- +<1> The `GetStoredScriptRequest` to execute and the `ActionListener` to use when +the execution completes + +[[java-rest-high-get-stored-script-listener]] +===== Action Listener + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `GetStoredScriptResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument + +[[java-rest-high-get-stored-script-response]] +==== Get Stored Script Response + +The returned `GetStoredScriptResponse` allows to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-response] +-------------------------------------------------- +<1> The script object consists of a content and a metadata +<2> The language the script is written in, which defaults to `painless`. +<3> The content of the script +<4> Any named options that should be passed into the script. 
\ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 4cd87a521d104..17acc8f13c04d 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -151,3 +151,14 @@ The Java High Level REST Client supports the following Tasks APIs: include::tasks/list_tasks.asciidoc[] include::tasks/cancel_tasks.asciidoc[] + +== Script APIs + +The Java High Level REST Client supports the following Scripts APIs: + +* <> +* <> + +include::script/get_script.asciidoc[] +include::script/delete_script.asciidoc[] + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json index 2240f0e1a0b75..0b2d6c5a5b9c2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json @@ -13,6 +13,10 @@ } }, "params" : { + "master_timeout": { + "type" : "time", + "description" : "Specify timeout for connection to master" + } } }, "body": null diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java index 42f08ae73e06d..741c105866f46 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.cluster.storedscripts; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.xcontent.XContentParser; public class DeleteStoredScriptResponse extends AcknowledgedResponse { @@ -29,4 +30,8 @@ public class DeleteStoredScriptResponse extends AcknowledgedResponse { public DeleteStoredScriptResponse(boolean acknowledged) { super(acknowledged); } + + public static DeleteStoredScriptResponse fromXContent(XContentParser parser) { + return new DeleteStoredScriptResponse(parseAcknowledged(parser)); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java index a394fe17f217f..7bfbbe7ad4d7f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java @@ -21,25 +21,63 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.StoredScriptSource; import java.io.IOException; +import java.util.Objects; -public class GetStoredScriptResponse extends 
ActionResponse implements ToXContentObject { +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +public class GetStoredScriptResponse extends ActionResponse implements StatusToXContentObject { + + public static final ParseField _ID_PARSE_FIELD = new ParseField("_id"); + public static final ParseField FOUND_PARSE_FIELD = new ParseField("found"); + public static final ParseField SCRIPT = new ParseField("script"); + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("GetStoredScriptResponse", + true, + (a, c) -> { + String id = (String) a[0]; + boolean found = (Boolean)a[1]; + StoredScriptSource scriptSource = (StoredScriptSource)a[2]; + return found ? new GetStoredScriptResponse(id, scriptSource) : new GetStoredScriptResponse(id, null); + }); + + static { + PARSER.declareField(constructorArg(), (p, c) -> p.text(), + _ID_PARSE_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField(constructorArg(), (p, c) -> p.booleanValue(), + FOUND_PARSE_FIELD, ObjectParser.ValueType.BOOLEAN); + PARSER.declareField(optionalConstructorArg(), (p, c) -> StoredScriptSource.fromXContent(p, true), + SCRIPT, ObjectParser.ValueType.OBJECT); + } + + private String id; private StoredScriptSource source; GetStoredScriptResponse() { } - GetStoredScriptResponse(StoredScriptSource source) { + GetStoredScriptResponse(String id, StoredScriptSource source) { + this.id = id; this.source = source; } + public String getId() { + return id; + } + /** * @return if a stored script and if not found null */ @@ -47,13 +85,30 @@ public StoredScriptSource getSource() { return source; } + @Override + public RestStatus status() { + return source != null ? 
RestStatus.OK : RestStatus.NOT_FOUND; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - source.toXContent(builder, params); + builder.startObject(); + + builder.field(_ID_PARSE_FIELD.getPreferredName(), id); + builder.field(FOUND_PARSE_FIELD.getPreferredName(), source != null); + if (source != null) { + builder.field(StoredScriptSource.SCRIPT_PARSE_FIELD.getPreferredName()); + source.toXContent(builder, params); + } + builder.endObject(); return builder; } + public static GetStoredScriptResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -67,6 +122,10 @@ public void readFrom(StreamInput in) throws IOException { } else { source = null; } + + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + id = in.readString(); + } } @Override @@ -84,5 +143,22 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(source.getSource()); } } + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeString(id); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetStoredScriptResponse that = (GetStoredScriptResponse) o; + return Objects.equals(id, that.id) && + Objects.equals(source, that.source); + } + + @Override + public int hashCode() { + return Objects.hash(id, source); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java index 63f24f31f59bd..368e40b96b7b3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java @@ -60,7 +60,7 @@ protected GetStoredScriptResponse newResponse() { @Override protected void masterOperation(GetStoredScriptRequest request, ClusterState state, ActionListener listener) throws Exception { - listener.onResponse(new GetStoredScriptResponse(scriptService.getStoredScript(state, request))); + listener.onResponse(new GetStoredScriptResponse(request.id(), scriptService.getStoredScript(state, request))); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java index 10050dda88235..1a14d50538237 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java @@ -19,19 +19,12 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; -import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import 
org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.rest.action.RestBuilderListener; -import org.elasticsearch.script.StoredScriptSource; +import org.elasticsearch.rest.action.RestStatusToXContentListener; import java.io.IOException; @@ -39,9 +32,6 @@ public class RestGetStoredScriptAction extends BaseRestHandler { - public static final ParseField _ID_PARSE_FIELD = new ParseField("_id"); - public static final ParseField FOUND_PARSE_FIELD = new ParseField("found"); - public RestGetStoredScriptAction(Settings settings, RestController controller) { super(settings); @@ -57,33 +47,7 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient client) throws IOException { String id = request.param("id"); GetStoredScriptRequest getRequest = new GetStoredScriptRequest(id); - - return channel -> client.admin().cluster().getStoredScript(getRequest, new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(GetStoredScriptResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - builder.field(_ID_PARSE_FIELD.getPreferredName(), id); - - StoredScriptSource source = response.getSource(); - boolean found = source != null; - builder.field(FOUND_PARSE_FIELD.getPreferredName(), found); - - if (found) { - builder.startObject(StoredScriptSource.SCRIPT_PARSE_FIELD.getPreferredName()); - builder.field(StoredScriptSource.LANG_PARSE_FIELD.getPreferredName(), source.getLang()); - builder.field(StoredScriptSource.SOURCE_PARSE_FIELD.getPreferredName(), source.getSource()); - - if (source.getOptions().isEmpty() == false) { - builder.field(StoredScriptSource.OPTIONS_PARSE_FIELD.getPreferredName(), source.getOptions()); - } - - builder.endObject(); - } - - builder.endObject(); - - return new BytesRestResponse(found ? RestStatus.OK : RestStatus.NOT_FOUND, builder); - } - }); + getRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getRequest.masterNodeTimeout())); + return channel -> client.admin().cluster().getStoredScript(getRequest, new RestStatusToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java index 11f8769c86b1f..885d72bdec6f5 100644 --- a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java +++ b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java @@ -185,7 +185,7 @@ private StoredScriptSource build(boolean ignoreEmpty) { } } - private static final ObjectParser PARSER = new ObjectParser<>("stored script source", Builder::new); + private static final ObjectParser PARSER = new ObjectParser<>("stored script source", true, Builder::new); static { // Defines the fields necessary to parse a Script as XContent using an ObjectParser. 
@@ -481,7 +481,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(LANG_PARSE_FIELD.getPreferredName(), lang); builder.field(SOURCE_PARSE_FIELD.getPreferredName(), source); - builder.field(OPTIONS_PARSE_FIELD.getPreferredName(), options); + if (options.isEmpty() == false) { + builder.field(OPTIONS_PARSE_FIELD.getPreferredName(), options); + } builder.endObject(); return builder; diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponseTests.java new file mode 100644 index 0000000000000..375a672263060 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponseTests.java @@ -0,0 +1,46 @@ +package org.elasticsearch.action.admin.cluster.storedscripts;/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; + +public class DeleteStoredScriptResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected DeleteStoredScriptResponse doParseInstance(XContentParser parser) throws IOException { + return DeleteStoredScriptResponse.fromXContent(parser); + } + + @Override + protected DeleteStoredScriptResponse createBlankInstance() { + return new DeleteStoredScriptResponse(); + } + + @Override + protected DeleteStoredScriptResponse createTestInstance() { + return new DeleteStoredScriptResponse(randomBoolean()); + } + + @Override + protected DeleteStoredScriptResponse mutateInstance(DeleteStoredScriptResponse instance) throws IOException { + return new DeleteStoredScriptResponse(instance.isAcknowledged() == false); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java new file mode 100644 index 0000000000000..1c92c0c8c2bf7 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java @@ -0,0 +1,61 @@ +package org.elasticsearch.action.admin.cluster.storedscripts;/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.StoredScriptSource; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.function.Predicate; + +public class GetStoredScriptResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected GetStoredScriptResponse doParseInstance(XContentParser parser) throws IOException { + return GetStoredScriptResponse.fromXContent(parser); + } + + @Override + protected GetStoredScriptResponse createBlankInstance() { + return new GetStoredScriptResponse(); + } + + @Override + protected GetStoredScriptResponse createTestInstance() { + return new GetStoredScriptResponse(randomAlphaOfLengthBetween(1, 10), randomScriptSource()); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return s -> "script.options".equals(s); + } + + private static StoredScriptSource randomScriptSource() { + final String lang = randomFrom("lang", "painless", "mustache"); + final String source = randomAlphaOfLengthBetween(1, 10); + final Map options = randomBoolean() + ? Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()) + : Collections.emptyMap(); + return new StoredScriptSource(lang, source, options); + } +} From 2396cbd449ce1e811ca43f29042151c5f8f5d79a Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Tue, 19 Jun 2018 14:21:11 +0200 Subject: [PATCH 27/92] Add get stored script and delete stored script to high level REST API - post backport fix Relates to #27205 --- .../admin/cluster/storedscripts/GetStoredScriptResponse.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java index 7bfbbe7ad4d7f..4cf686b9c282c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java @@ -123,7 +123,7 @@ public void readFrom(StreamInput in) throws IOException { source = null; } - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { id = in.readString(); } } @@ -143,7 +143,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(source.getSource()); } } - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeString(id); } } From a5540ba19c8364289b699de5262e34c1a30f01af Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 19 Jun 2018 15:43:31 +0200 Subject: [PATCH 28/92] Make release notes ignore the `>test-failure` label. (#31309) This label is uninteresting for release notes. 
--- dev-tools/es_release_notes.pl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-tools/es_release_notes.pl b/dev-tools/es_release_notes.pl index 265df91544038..93a4ba74f643b 100755 --- a/dev-tools/es_release_notes.pl +++ b/dev-tools/es_release_notes.pl @@ -32,7 +32,7 @@ ">enhancement", ">bug", ">regression", ">upgrade" ); my %Ignore = map { $_ => 1 } - ( ">non-issue", ">refactoring", ">docs", ">test", ":Core/Build" ); + ( ">non-issue", ">refactoring", ">docs", ">test", ">test-failure", ":Core/Build" ); my %Group_Labels = ( '>breaking' => 'Breaking changes', From c0961b79be3350e00869202f40b88ee39834833a Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Jun 2018 07:21:58 -0700 Subject: [PATCH 29/92] Docs: Add note about removing prepareExecute from the java client (#31401) relates #30966 --- docs/reference/migration/migrate_7_0/java.asciidoc | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/reference/migration/migrate_7_0/java.asciidoc b/docs/reference/migration/migrate_7_0/java.asciidoc index a686ba0bfbfbc..169943a16ac03 100644 --- a/docs/reference/migration/migrate_7_0/java.asciidoc +++ b/docs/reference/migration/migrate_7_0/java.asciidoc @@ -5,4 +5,10 @@ `isShardsAcked` has been replaced by `isShardsAcknowledged` in `CreateIndexResponse`, `RolloverResponse` and -`CreateIndexClusterStateUpdateResponse`. \ No newline at end of file +`CreateIndexClusterStateUpdateResponse`. + +==== `prepareExecute` removed from the client api + +The `prepareExecute` method which created a request builder has been +removed from the client api. Instead, construct a builder for the +appropriate request directly. From 73c182ce089a2f467141b707a3db41e3b972897b Mon Sep 17 00:00:00 2001 From: markharwood Date: Tue, 19 Jun 2018 15:51:59 +0100 Subject: [PATCH 30/92] Mute HttpExporterTests#testHttpExporterShutdown test Tracked by #31433 --- .../xpack/monitoring/exporter/http/HttpExporterTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java index 2c8c700fcf615..ff83621119ef6 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java @@ -460,6 +460,7 @@ public void testHttpExporter() throws Exception { } } + @AwaitsFix (bugUrl = "https://github.com/elastic/elasticsearch/issues/31433" ) public void testHttpExporterShutdown() throws Exception { final Config config = createConfig(Settings.EMPTY); final RestClient client = mock(RestClient.class); From 5236d0291e7eee0a40f71b1d94c59bc1a31a5cb0 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 19 Jun 2018 11:15:50 -0400 Subject: [PATCH 31/92] Docs: Advice for reindexing many indices (#31279) Folks tend to want to be able to make a single `_reindex` call to migrate many indices. You *can* do that and we even have an example of how to do that in the docs but it isn't always a good idea. This change adds some advice to the docs: generally you want to make one reindex call per index. 
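For readers driving this from code rather than bash, a minimal sketch of the same per-index approach using the low-level REST client is shown below. This is an illustration only, not part of the change: the `RestClient` endpoint and the index names (`i1`, `i2`, `i3`) are hypothetical, and the `performRequest(method, endpoint, params, entity)` call mirrors the usage already seen elsewhere in this series.

["source","java"]
--------------------------------------------------
import org.apache.http.HttpHost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.RestClient;

import java.util.Arrays;
import java.util.Collections;

public class PerIndexReindex {
    public static void main(String[] args) throws Exception {
        // One _reindex call per index: a failure only affects that index,
        // which can then be cleaned up and retried on its own.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            for (String index : Arrays.asList("i1", "i2", "i3")) { // hypothetical index names
                String body = "{\"source\":{\"index\":\"" + index + "\"},"
                        + "\"dest\":{\"index\":\"" + index + "-reindexed\"}}";
                client.performRequest("POST", "/_reindex", Collections.emptyMap(),
                        new StringEntity(body, ContentType.APPLICATION_JSON));
            }
        }
    }
}
--------------------------------------------------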
Closes #22920 --- docs/reference/docs/reindex.asciidoc | 31 ++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index bdbffb0a08d5d..c04bbd6813795 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -1028,11 +1028,38 @@ number of slices. Whether query or indexing performance dominates the runtime depends on the documents being reindexed and cluster resources. +[float] +=== Reindexing many indices +If you have many indices to reindex it is generally better to reindex them +one at a time rather than using a glob pattern to pick up many indices. That +way you can resume the process if there are any errors by removing the +partially completed index and starting over at that index. It also makes +parallelizing the process fairly simple: split the list of indices to reindex +and run each list in parallel. + +One off bash scripts seem to work nicely for this: + +[source,bash] +---------------------------------------------------------------- +for index in i1 i2 i3 i4 i5; do + curl -HContent-Type:application/json -XPOST localhost:9200/_reindex?pretty -d'{ + "source": { + "index": "'$index'" + }, + "dest": { + "index": "'$index'-reindexed" + } + }' +done +---------------------------------------------------------------- +// NOTCONSOLE + [float] === Reindex daily indices -You can use `_reindex` in combination with <> -to reindex daily indices to apply a new template to the existing documents. +Notwithstanding the above advice, you can use `_reindex` in combination with +<> to reindex daily indices to apply +a new template to the existing documents. Assuming you have indices consisting of documents as follows: From ffba20b748c80bfa664be9157bc88c7a2b11b360 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Tue, 19 Jun 2018 09:36:12 -0600 Subject: [PATCH 32/92] Do not preallocate bytes for channel buffer (#31400) Currently, when we open a new channel, we pass it an InboundChannelBuffer. The channel buffer is preallocated a single 16kb page. However, there is no guarantee that this channel will be read from anytime soon. Instead, this commit does not preallocate that page. That page will be allocated when we receive a read event. 
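As a quick illustration of the new allocation behaviour, the following self-contained sketch mirrors what the updated `InboundChannelBuffer` tests below assert: capacity starts at zero and a page is only pulled from the supplier once `ensureCapacity` is called from the read path. The class name `LazyChannelBuffer` and the `main` driver are invented for this example; it is not the real buffer implementation, just a minimal model of the idea.

[source,java]
--------------------------------------------------
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.function.Supplier;

// Minimal model of the lazy-allocation idea: nothing is allocated on construction.
public class LazyChannelBuffer {

    private static final int PAGE_SIZE = 1 << 14; // 16kb pages, as in the real buffer

    private final Supplier<ByteBuffer> pageSupplier;
    private final ArrayDeque<ByteBuffer> pages = new ArrayDeque<>();
    private long capacity = 0; // no preallocated page, so this starts at 0

    public LazyChannelBuffer(Supplier<ByteBuffer> pageSupplier) {
        this.pageSupplier = pageSupplier;
    }

    // Pages are only requested from the supplier when capacity is actually needed,
    // which in the transport happens when a read event arrives for the channel.
    public void ensureCapacity(long requiredCapacity) {
        while (capacity < requiredCapacity) {
            pages.addLast(pageSupplier.get());
            capacity += PAGE_SIZE;
        }
    }

    public long getCapacity() {
        return capacity;
    }

    public static void main(String[] args) {
        LazyChannelBuffer buffer = new LazyChannelBuffer(() -> ByteBuffer.allocate(PAGE_SIZE));
        System.out.println(buffer.getCapacity()); // 0 - opening a channel allocates nothing
        buffer.ensureCapacity(PAGE_SIZE);         // first read event triggers the allocation
        System.out.println(buffer.getCapacity()); // 16384
    }
}
--------------------------------------------------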
--- .../elasticsearch/nio/InboundChannelBuffer.java | 1 - .../nio/InboundChannelBufferTests.java | 14 +++++++++++--- .../xpack/security/transport/nio/SSLDriver.java | 1 + 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java b/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java index 7c718237cd20e..f7e6fbb768728 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java @@ -58,7 +58,6 @@ public InboundChannelBuffer(Supplier pageSupplier) { this.pageSupplier = pageSupplier; this.pages = new ArrayDeque<>(); this.capacity = PAGE_SIZE * pages.size(); - ensureCapacity(PAGE_SIZE); } public static InboundChannelBuffer allocatingInstance() { diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java index 8dd72e869e8d9..162094953d2bb 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java @@ -34,16 +34,20 @@ public class InboundChannelBufferTests extends ESTestCase { new InboundChannelBuffer.Page(ByteBuffer.allocate(BigArrays.BYTE_PAGE_SIZE), () -> { }); - public void testNewBufferHasSinglePage() { + public void testNewBufferNoPages() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); - assertEquals(PAGE_SIZE, channelBuffer.getCapacity()); - assertEquals(PAGE_SIZE, channelBuffer.getRemaining()); + assertEquals(0, channelBuffer.getCapacity()); + assertEquals(0, channelBuffer.getRemaining()); assertEquals(0, channelBuffer.getIndex()); } public void testExpandCapacity() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); + assertEquals(0, channelBuffer.getCapacity()); + assertEquals(0, channelBuffer.getRemaining()); + + channelBuffer.ensureCapacity(PAGE_SIZE); assertEquals(PAGE_SIZE, channelBuffer.getCapacity()); assertEquals(PAGE_SIZE, channelBuffer.getRemaining()); @@ -56,6 +60,7 @@ public void testExpandCapacity() { public void testExpandCapacityMultiplePages() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); + channelBuffer.ensureCapacity(PAGE_SIZE); assertEquals(PAGE_SIZE, channelBuffer.getCapacity()); @@ -68,6 +73,7 @@ public void testExpandCapacityMultiplePages() { public void testExpandCapacityRespectsOffset() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); + channelBuffer.ensureCapacity(PAGE_SIZE); assertEquals(PAGE_SIZE, channelBuffer.getCapacity()); assertEquals(PAGE_SIZE, channelBuffer.getRemaining()); @@ -87,6 +93,7 @@ public void testExpandCapacityRespectsOffset() { public void testIncrementIndex() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); + channelBuffer.ensureCapacity(PAGE_SIZE); assertEquals(0, channelBuffer.getIndex()); assertEquals(PAGE_SIZE, channelBuffer.getRemaining()); @@ -99,6 +106,7 @@ public void testIncrementIndex() { public void testIncrementIndexWithOffset() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); + channelBuffer.ensureCapacity(PAGE_SIZE); assertEquals(0, channelBuffer.getIndex()); assertEquals(PAGE_SIZE, channelBuffer.getRemaining()); diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java index c143978468dfd..4080574713cce 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java @@ -365,6 +365,7 @@ private void handshake() throws SSLException { @Override public void read(InboundChannelBuffer buffer) throws SSLException { + ensureApplicationBufferSize(buffer); boolean continueUnwrap = true; while (continueUnwrap && networkReadBuffer.position() > 0) { networkReadBuffer.flip(); From cfb470429ea230e5c7b6e440a5480256e2dd7d6c Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 19 Jun 2018 08:49:32 -0700 Subject: [PATCH 33/92] [DOCS] Add code snippet testing for more ML APIs (#31404) --- x-pack/docs/build.gradle | 3 --- x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc | 3 ++- x-pack/docs/en/rest-api/ml/validate-detector.asciidoc | 3 +-- x-pack/docs/en/rest-api/ml/validate-job.asciidoc | 3 +-- 4 files changed, 4 insertions(+), 8 deletions(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index ed70fcd44a7f2..0d1def2b4f5e5 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -57,7 +57,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/license/delete-license.asciidoc', 'en/rest-api/license/update-license.asciidoc', 'en/ml/api-quickref.asciidoc', - 'en/rest-api/ml/delete-calendar-event.asciidoc', 'en/rest-api/ml/delete-snapshot.asciidoc', 'en/rest-api/ml/forecast.asciidoc', 'en/rest-api/ml/get-bucket.asciidoc', @@ -71,8 +70,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/ml/preview-datafeed.asciidoc', 'en/rest-api/ml/revert-snapshot.asciidoc', 'en/rest-api/ml/update-snapshot.asciidoc', - 'en/rest-api/ml/validate-detector.asciidoc', - 'en/rest-api/ml/validate-job.asciidoc', 'en/rest-api/watcher/stats.asciidoc', 'en/watcher/example-watches/watching-time-series-data.asciidoc', ] diff --git a/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc b/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc index 73458f3179197..ef8dad39dba70 100644 --- a/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc +++ b/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc @@ -44,7 +44,7 @@ calendar: DELETE _xpack/ml/calendars/planned-outages/events/LS8LJGEBMTCMA-qz49st -------------------------------------------------- // CONSOLE -// TEST[skip:automatically-generated ID] +// TEST[catch:missing] When the event is removed, you receive the following results: [source,js] @@ -53,3 +53,4 @@ When the event is removed, you receive the following results: "acknowledged": true } ---- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc b/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc index f688ef91cfe53..ab8a0de442cf8 100644 --- a/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc +++ b/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc @@ -28,7 +28,6 @@ see <>. You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. -//<>. 
==== Examples @@ -45,7 +44,6 @@ POST _xpack/ml/anomaly_detectors/_validate/detector } -------------------------------------------------- // CONSOLE -// TEST[skip:todo] When the validation completes, you receive the following results: [source,js] @@ -54,3 +52,4 @@ When the validation completes, you receive the following results: "acknowledged": true } ---- +// TESTRESPONSE \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/ml/validate-job.asciidoc b/x-pack/docs/en/rest-api/ml/validate-job.asciidoc index 61d0c70514e8d..0ccc5bc04e1d1 100644 --- a/x-pack/docs/en/rest-api/ml/validate-job.asciidoc +++ b/x-pack/docs/en/rest-api/ml/validate-job.asciidoc @@ -28,7 +28,6 @@ see <>. You must have `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. -//<>. ==== Examples @@ -56,7 +55,6 @@ POST _xpack/ml/anomaly_detectors/_validate } -------------------------------------------------- // CONSOLE -// TEST[skip:todo] When the validation is complete, you receive the following results: [source,js] @@ -65,3 +63,4 @@ When the validation is complete, you receive the following results: "acknowledged": true } ---- +// TESTRESPONSE \ No newline at end of file From 40c4bd562844beda9013fde5160143f3bc1f5322 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 19 Jun 2018 19:03:31 +0200 Subject: [PATCH 34/92] Preserve response headers on cluster update task (#31421) #31241 changed the cluster state update tasks to run under system context. The context wrapping did not preserve response headers, though. This has led to a test failure on 6.x #31408, as the deprecation warnings were not carried back anymore to the caller when creating an index. This commit changes the restorable context supplier to preserve response headers. 
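To make the fix concrete, here is a hedged sketch of the difference the `true` flag makes. It only uses `ThreadContext` methods that appear in the diff below (`newRestorableContext`, `stashContext`, `markAsSystemContext`, `addResponseHeader`, `getResponseHeaders`) and assumes the elasticsearch server classes are on the classpath; the wrapper class and `main` method are invented for the example.

[source,java]
--------------------------------------------------
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;

import java.util.function.Supplier;

public class RestorableContextSketch {

    public static void main(String[] args) {
        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);

        // Captured in the caller's context. Passing true (the fix) makes the restore
        // merge in any response headers added while the task ran as the system context.
        Supplier<ThreadContext.StoredContext> restorable = threadContext.newRestorableContext(true);

        try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
            threadContext.markAsSystemContext();
            // e.g. a deprecation warning emitted by a cluster state update task
            threadContext.addResponseHeader("testResponse", "testResponse");

            // Deliver a callback in the caller's context, as MasterService does.
            try (ThreadContext.StoredContext restored = restorable.get()) {
                // With newRestorableContext(false) this map would be empty here.
                System.out.println(threadContext.getResponseHeaders());
            }
        }
    }
}
--------------------------------------------------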
--- .../elasticsearch/cluster/service/MasterService.java | 2 +- .../cluster/service/MasterServiceTests.java | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 2543be4811c1e..8927adfd43458 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -730,7 +730,7 @@ public void submitStateUpdateTasks(final String source, return; } final ThreadContext threadContext = threadPool.getThreadContext(); - final Supplier supplier = threadContext.newRestorableContext(false); + final Supplier supplier = threadContext.newRestorableContext(true); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { threadContext.markAsSystemContext(); diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 20587d31f5359..1ef548bd68114 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -54,6 +54,7 @@ import org.junit.BeforeClass; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -177,6 +178,8 @@ public void testThreadContext() throws InterruptedException { try (ThreadContext.StoredContext ignored = threadPool.getThreadContext().stashContext()) { final Map expectedHeaders = Collections.singletonMap("test", "test"); + final Map> expectedResponseHeaders = Collections.singletonMap("testResponse", + Arrays.asList("testResponse")); threadPool.getThreadContext().putHeader(expectedHeaders); final TimeValue ackTimeout = randomBoolean() ? 
TimeValue.ZERO : TimeValue.timeValueMillis(randomInt(10000)); @@ -187,6 +190,8 @@ public void testThreadContext() throws InterruptedException { public ClusterState execute(ClusterState currentState) { assertTrue(threadPool.getThreadContext().isSystemContext()); assertEquals(Collections.emptyMap(), threadPool.getThreadContext().getHeaders()); + threadPool.getThreadContext().addResponseHeader("testResponse", "testResponse"); + assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); if (randomBoolean()) { return ClusterState.builder(currentState).build(); @@ -201,6 +206,7 @@ public ClusterState execute(ClusterState currentState) { public void onFailure(String source, Exception e) { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); latch.countDown(); } @@ -208,6 +214,7 @@ public void onFailure(String source, Exception e) { public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); latch.countDown(); } @@ -229,6 +236,7 @@ public TimeValue timeout() { public void onAllNodesAcked(@Nullable Exception e) { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); latch.countDown(); } @@ -236,6 +244,7 @@ public void onAllNodesAcked(@Nullable Exception e) { public void onAckTimeout() { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + assertEquals(expectedResponseHeaders, threadPool.getThreadContext().getResponseHeaders()); latch.countDown(); } @@ -243,6 +252,7 @@ public void onAckTimeout() { assertFalse(threadPool.getThreadContext().isSystemContext()); assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + assertEquals(Collections.emptyMap(), threadPool.getThreadContext().getResponseHeaders()); } latch.await(); From 8fd1f5fbed65ede2e904d29255e3d3b12643b7ae Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 19 Jun 2018 10:33:57 -0700 Subject: [PATCH 35/92] [DOCS] Moves the info API to docs (#31121) --- docs/reference/rest-api/index.asciidoc | 2 +- .../reference}/rest-api/info.asciidoc | 32 +++++++++---------- 2 files changed, 17 insertions(+), 17 deletions(-) rename {x-pack/docs/en => docs/reference}/rest-api/info.asciidoc (84%) diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 8c58246a0a658..b9d3c9db60a6f 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -18,7 +18,7 @@ directly to configure and access {xpack} features. 
-- -include::{xes-repo-dir}/rest-api/info.asciidoc[] +include::info.asciidoc[] include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[] include::{xes-repo-dir}/rest-api/licensing.asciidoc[] include::{xes-repo-dir}/rest-api/migration.asciidoc[] diff --git a/x-pack/docs/en/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc similarity index 84% rename from x-pack/docs/en/rest-api/info.asciidoc rename to docs/reference/rest-api/info.asciidoc index ccb979124f2da..1cf4ab563b185 100644 --- a/x-pack/docs/en/rest-api/info.asciidoc +++ b/docs/reference/rest-api/info.asciidoc @@ -1,8 +1,9 @@ [role="xpack"] +[testenv="basic"] [[info-api]] == Info API -The info API provides general information about the installed {xpack}. +The info API provides general information about the installed {xpack} features. [float] === Request @@ -55,30 +56,29 @@ Example response: "date" : "2015-04-07T13:34:42Z" }, "license" : { - "uid" : "893361dc-9749-4997-93cb-802e3dofh7aa", - "type" : "trial", - "mode" : "trial", - "status" : "active", - "expiry_date_in_millis" : 1914278399999 + "uid" : "893361dc-9749-4997-93cb-xxx", + "type" : "basic", + "mode" : "basic", + "status" : "active" }, "features" : { "graph" : { "description" : "Graph Data Exploration for the Elastic Stack", - "available" : true, + "available" : false, "enabled" : true }, "logstash" : { "description" : "Logstash management component for X-Pack", - "available" : true, + "available" : false, "enabled" : true }, "ml" : { "description" : "Machine Learning for the Elastic Stack", - "available" : true, + "available" : false, "enabled" : true, "native_code_info" : { - "version" : "6.0.0-alpha1-SNAPSHOT", - "build_hash" : "d081461967d61a" + "version" : "7.0.0-alpha1-SNAPSHOT", + "build_hash" : "99a07c016d5a73" } }, "monitoring" : { @@ -93,12 +93,12 @@ Example response: }, "security" : { "description" : "Security for the Elastic Stack", - "available" : true, + "available" : false, "enabled" : true }, "watcher" : { "description" : "Alerting, Notification and Automation for the Elastic Stack", - "available" : true, + "available" : false, "enabled" : true } }, @@ -107,10 +107,10 @@ Example response: ------------------------------------------------------------ // TESTRESPONSE[s/"hash" : "2798b1a3ce779b3611bb53a0082d4d741e4d3168",/"hash" : "$body.build.hash",/] // TESTRESPONSE[s/"date" : "2015-04-07T13:34:42Z"/"date" : "$body.build.date"/] -// TESTRESPONSE[s/"uid" : "893361dc-9749-4997-93cb-802e3dofh7aa",/"uid": "$body.license.uid",/] +// TESTRESPONSE[s/"uid" : "893361dc-9749-4997-93cb-xxx",/"uid": "$body.license.uid",/] // TESTRESPONSE[s/"expiry_date_in_millis" : 1914278399999/"expiry_date_in_millis" : "$body.license.expiry_date_in_millis"/] -// TESTRESPONSE[s/"version" : "6.0.0-alpha1-SNAPSHOT",/"version": "$body.features.ml.native_code_info.version",/] -// TESTRESPONSE[s/"build_hash" : "d081461967d61a"/"build_hash": "$body.features.ml.native_code_info.build_hash"/] +// TESTRESPONSE[s/"version" : "7.0.0-alpha1-SNAPSHOT",/"version": "$body.features.ml.native_code_info.version",/] +// TESTRESPONSE[s/"build_hash" : "99a07c016d5a73"/"build_hash": "$body.features.ml.native_code_info.build_hash"/] // So much s/// but at least we test that the layout is close to matching.... 
The following example only returns the build and features information: From 529e704b1162a232e9714ea49d13e8597f8578ba Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Tue, 19 Jun 2018 11:50:03 -0600 Subject: [PATCH 36/92] Unify http channels and exception handling (#31379) This is a general cleanup of channels and exception handling in http. This commit introduces a CloseableChannel that is a superclass of TcpChannel and HttpChannel. This allows us to unify the closing logic between tcp and http transports. Additionally, the normal http channels are extracted to the abstract server transport. Finally, this commit (mostly) unifies the exception handling between nio and netty4 http server transports. --- .../http/netty4/Netty4HttpChannel.java | 33 +++++ .../http/netty4/Netty4HttpRequestHandler.java | 11 +- .../netty4/Netty4HttpServerTransport.java | 71 ++++------- .../netty4/Netty4OpenChannelsHandler.java | 96 -------------- .../netty4/SimpleNetty4TransportTests.java | 3 +- .../http/nio/NioHttpChannel.java | 13 ++ .../http/nio/NioHttpServerTransport.java | 50 ++------ ...oSocketChannel.java => NioTcpChannel.java} | 4 +- ...tChannel.java => NioTcpServerChannel.java} | 4 +- .../transport/nio/NioTransport.java | 25 ++-- .../transport/nio/TcpReadWriteHandler.java | 4 +- .../nio/SimpleNioTransportTests.java | 3 +- .../common/network/CloseableChannel.java | 118 ++++++++++++++++++ .../http/AbstractHttpServerTransport.java | 62 ++++++++- .../http/DefaultRestChannel.java | 3 +- .../org/elasticsearch/http/HttpChannel.java | 10 +- .../org/elasticsearch/http/HttpStats.java | 4 +- .../elasticsearch/transport/TcpChannel.java | 87 +------------ .../elasticsearch/transport/TcpTransport.java | 31 ++--- .../test/rest/FakeRestRequest.java | 10 ++ .../transport/MockTcpTransportTests.java | 3 +- .../nio/SimpleMockNioTransportTests.java | 3 +- .../netty4/SecurityNetty4Transport.java | 9 +- .../http/netty4/Netty4HttpMockUtil.java | 23 ---- .../SecurityNetty4HttpServerTransport.java | 33 +++-- .../transport/nio/SecurityNioTransport.java | 21 ++-- ...ecurityNetty4HttpServerTransportTests.java | 8 -- .../nio/SimpleSecurityNioTransportTests.java | 3 +- 28 files changed, 353 insertions(+), 392 deletions(-) delete mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java rename plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/{TcpNioSocketChannel.java => NioTcpChannel.java} (92%) rename plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/{TcpNioServerSocketChannel.java => NioTcpServerChannel.java} (92%) create mode 100644 server/src/main/java/org/elasticsearch/common/network/CloseableChannel.java delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/http/netty4/Netty4HttpMockUtil.java diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java index 473985d21091b..981a417449f14 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java @@ -22,6 +22,7 @@ import io.netty.channel.Channel; import io.netty.channel.ChannelPromise; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.concurrent.CompletableContext; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpResponse; import 
org.elasticsearch.transport.netty4.Netty4Utils; @@ -31,9 +32,23 @@ public class Netty4HttpChannel implements HttpChannel { private final Channel channel; + private final CompletableContext closeContext = new CompletableContext<>(); Netty4HttpChannel(Channel channel) { this.channel = channel; + this.channel.closeFuture().addListener(f -> { + if (f.isSuccess()) { + closeContext.complete(null); + } else { + Throwable cause = f.cause(); + if (cause instanceof Error) { + Netty4Utils.maybeDie(cause); + closeContext.completeExceptionally(new Exception(cause)); + } else { + closeContext.completeExceptionally((Exception) cause); + } + } + }); } @Override @@ -65,6 +80,16 @@ public InetSocketAddress getRemoteAddress() { return (InetSocketAddress) channel.remoteAddress(); } + @Override + public void addCloseListener(ActionListener listener) { + closeContext.addListener(ActionListener.toBiConsumer(listener)); + } + + @Override + public boolean isOpen() { + return channel.isOpen(); + } + @Override public void close() { channel.close(); @@ -73,4 +98,12 @@ public void close() { public Channel getNettyChannel() { return channel; } + + @Override + public String toString() { + return "Netty4HttpChannel{" + + "localAddress=" + getLocalAddress() + + ", remoteAddress=" + getRemoteAddress() + + '}'; + } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java index 4547a63a9a278..124bd607ab7ae 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java @@ -29,6 +29,8 @@ import org.elasticsearch.http.HttpPipelinedRequest; import org.elasticsearch.transport.netty4.Netty4Utils; +import static org.elasticsearch.http.netty4.Netty4HttpServerTransport.HTTP_CHANNEL_KEY; + @ChannelHandler.Sharable class Netty4HttpRequestHandler extends SimpleChannelInboundHandler> { @@ -40,7 +42,7 @@ class Netty4HttpRequestHandler extends SimpleChannelInboundHandler msg) throws Exception { - Netty4HttpChannel channel = ctx.channel().attr(Netty4HttpServerTransport.HTTP_CHANNEL_KEY).get(); + Netty4HttpChannel channel = ctx.channel().attr(HTTP_CHANNEL_KEY).get(); FullHttpRequest request = msg.getRequest(); try { @@ -75,7 +77,12 @@ protected void channelRead0(ChannelHandlerContext ctx, HttpPipelinedRequest serverChannels = new ArrayList<>(); - // package private for testing - Netty4OpenChannelsHandler serverOpenChannels; - - private final Netty4CorsConfig corsConfig; public Netty4HttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, @@ -216,8 +210,6 @@ public Settings settings() { protected void doStart() { boolean success = false; try { - this.serverOpenChannels = new Netty4OpenChannelsHandler(logger); - serverBootstrap = new ServerBootstrap(); serverBootstrap.group(new NioEventLoopGroup(workerCount, daemonThreadFactory(settings, @@ -281,10 +273,9 @@ static Netty4CorsConfig buildCorsConfig(Settings settings) { builder.allowCredentials(); } String[] strMethods = Strings.tokenizeToStringArray(SETTING_CORS_ALLOW_METHODS.get(settings), ","); - HttpMethod[] methods = Arrays.asList(strMethods) - .stream() + HttpMethod[] methods = Arrays.stream(strMethods) .map(HttpMethod::valueOf) - .toArray(size -> new HttpMethod[size]); + .toArray(HttpMethod[]::new); return 
builder.allowedRequestMethods(methods) .maxAge(SETTING_CORS_MAX_AGE.get(settings)) .allowedRequestHeaders(Strings.tokenizeToStringArray(SETTING_CORS_ALLOW_HEADERS.get(settings), ",")) @@ -327,15 +318,21 @@ protected void doStop() { Netty4Utils.closeChannels(serverChannels); } catch (IOException e) { logger.trace("exception while closing channels", e); + } finally { + serverChannels.clear(); } - serverChannels.clear(); } } - if (serverOpenChannels != null) { - serverOpenChannels.close(); - serverOpenChannels = null; + // TODO: Move all of channel closing to abstract class once server channels are handled + try { + CloseableChannel.closeChannels(new ArrayList<>(httpChannels), true); + } catch (Exception e) { + logger.warn("unexpected exception while closing http channels", e); } + httpChannels.clear(); + + if (serverBootstrap != null) { serverBootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS).awaitUninterruptibly(); @@ -349,38 +346,18 @@ protected void doClose() { @Override public HttpStats stats() { - Netty4OpenChannelsHandler channels = serverOpenChannels; - return new HttpStats(channels == null ? 0 : channels.numberOfOpenChannels(), channels == null ? 0 : channels.totalChannels()); + return new HttpStats(httpChannels.size(), totalChannelsAccepted.get()); } - public Netty4CorsConfig getCorsConfig() { - return corsConfig; - } - - protected void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + @Override + protected void onException(HttpChannel channel, Exception cause) { if (cause instanceof ReadTimeoutException) { if (logger.isTraceEnabled()) { - logger.trace("Read timeout [{}]", ctx.channel().remoteAddress()); + logger.trace("Http read timeout {}", channel); } - ctx.channel().close(); + CloseableChannel.closeChannel(channel);; } else { - if (!lifecycle.started()) { - // ignore - return; - } - if (!NetworkExceptionHelper.isCloseConnectionException(cause)) { - logger.warn( - (Supplier) () -> new ParameterizedMessage( - "caught exception while handling client http traffic, closing connection {}", ctx.channel()), - cause); - ctx.channel().close(); - } else { - logger.debug( - (Supplier) () -> new ParameterizedMessage( - "caught exception while handling client http traffic, closing connection {}", ctx.channel()), - cause); - ctx.channel().close(); - } + super.onException(channel, cause); } } @@ -404,9 +381,8 @@ protected HttpChannelHandler(final Netty4HttpServerTransport transport, final Ht @Override protected void initChannel(Channel ch) throws Exception { - Netty4HttpChannel nettyTcpChannel = new Netty4HttpChannel(ch); - ch.attr(HTTP_CHANNEL_KEY).set(nettyTcpChannel); - ch.pipeline().addLast("openChannels", transport.serverOpenChannels); + Netty4HttpChannel nettyHttpChannel = new Netty4HttpChannel(ch); + ch.attr(HTTP_CHANNEL_KEY).set(nettyHttpChannel); ch.pipeline().addLast("read_timeout", new ReadTimeoutHandler(transport.readTimeoutMillis, TimeUnit.MILLISECONDS)); final HttpRequestDecoder decoder = new HttpRequestDecoder( handlingSettings.getMaxInitialLineLength(), @@ -423,10 +399,11 @@ protected void initChannel(Channel ch) throws Exception { ch.pipeline().addLast("encoder_compress", new HttpContentCompressor(handlingSettings.getCompressionLevel())); } if (handlingSettings.isCorsEnabled()) { - ch.pipeline().addLast("cors", new Netty4CorsHandler(transport.getCorsConfig())); + ch.pipeline().addLast("cors", new Netty4CorsHandler(transport.corsConfig)); } ch.pipeline().addLast("pipelining", new Netty4HttpPipeliningHandler(transport.logger, 
transport.pipeliningMaxEvents)); ch.pipeline().addLast("handler", requestHandler); + transport.serverAcceptedChannel(nettyHttpChannel); } @Override diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java deleted file mode 100644 index 2270c90967ff2..0000000000000 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.transport.netty4; - -import io.netty.channel.Channel; -import io.netty.channel.ChannelFuture; -import io.netty.channel.ChannelFutureListener; -import io.netty.channel.ChannelHandler; -import io.netty.channel.ChannelHandlerContext; -import io.netty.channel.ChannelInboundHandlerAdapter; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.metrics.CounterMetric; - -import java.io.IOException; -import java.util.Collections; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; - -@ChannelHandler.Sharable -public class Netty4OpenChannelsHandler extends ChannelInboundHandlerAdapter implements Releasable { - - final Set openChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); - final CounterMetric openChannelsMetric = new CounterMetric(); - final CounterMetric totalChannelsMetric = new CounterMetric(); - - final Logger logger; - - public Netty4OpenChannelsHandler(Logger logger) { - this.logger = logger; - } - - final ChannelFutureListener remover = new ChannelFutureListener() { - @Override - public void operationComplete(ChannelFuture future) throws Exception { - boolean removed = openChannels.remove(future.channel()); - if (removed) { - openChannelsMetric.dec(); - } - if (logger.isTraceEnabled()) { - logger.trace("channel closed: {}", future.channel()); - } - } - }; - - @Override - public void channelActive(ChannelHandlerContext ctx) throws Exception { - if (logger.isTraceEnabled()) { - logger.trace("channel opened: {}", ctx.channel()); - } - final boolean added = openChannels.add(ctx.channel()); - if (added) { - openChannelsMetric.inc(); - totalChannelsMetric.inc(); - ctx.channel().closeFuture().addListener(remover); - } - - super.channelActive(ctx); - } - - public long numberOfOpenChannels() { - return openChannelsMetric.count(); - } - - public long totalChannels() { - return totalChannelsMetric.count(); - } - - @Override - public void close() { - try { - Netty4Utils.closeChannels(openChannels); - } catch (IOException e) { - logger.trace("exception while closing channels", e); - } - openChannels.clear(); - } - -} 
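With the Netty-specific open-channels handler gone, channel accounting moves to plain collections plus the unified close listener. The sketch below is not part of the patch; `ChannelTracker` is an invented name that condenses the logic `AbstractHttpServerTransport` gains further down (`serverAcceptedChannel`, `stats`, and the shutdown path), using only the `CloseableChannel` and `ActionListener.wrap(Runnable)` calls introduced or used in this commit.

[source,java]
--------------------------------------------------
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.network.CloseableChannel;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

// Condensed model of the channel accounting that replaces Netty4OpenChannelsHandler.
public class ChannelTracker<C extends CloseableChannel> {

    private final Set<C> openChannels = Collections.newSetFromMap(new ConcurrentHashMap<>());
    private final AtomicLong totalAccepted = new AtomicLong();

    // Called when the server accepts a channel; the close listener replaces the
    // old ChannelFutureListener-based "remover".
    public void accepted(C channel) {
        openChannels.add(channel);
        totalAccepted.incrementAndGet();
        channel.addCloseListener(ActionListener.wrap(() -> openChannels.remove(channel)));
    }

    // Backs HttpStats: currently open channels and total channels ever accepted.
    public long openCount() {
        return openChannels.size();
    }

    public long totalAcceptedCount() {
        return totalAccepted.get();
    }

    // On shutdown, close every tracked channel and block until the closes complete.
    public void closeAll() {
        CloseableChannel.closeChannels(new ArrayList<>(openChannels), true);
        openChannels.clear();
    }
}
--------------------------------------------------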
diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java index efa296b6278af..760ac1253c6fe 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -91,7 +92,7 @@ protected void closeConnectionChannel(Transport transport, Transport.Connection final Netty4Transport t = (Netty4Transport) transport; @SuppressWarnings("unchecked") final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - TcpChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); + CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } public void testConnectException() throws UnknownHostException { diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java index 088f0e85dde23..255faab5ddad0 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java @@ -36,4 +36,17 @@ public class NioHttpChannel extends NioSocketChannel implements HttpChannel { public void sendResponse(HttpResponse response, ActionListener listener) { getContext().sendMessage(response, ActionListener.toBiConsumer(listener)); } + + @Override + public void addCloseListener(ActionListener listener) { + addCloseListener(ActionListener.toBiConsumer(listener)); + } + + @Override + public String toString() { + return "NioHttpChannel{" + + "localAddress=" + getLocalAddress() + + ", remoteAddress=" + getRemoteAddress() + + '}'; + } } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java index ba51f7c684818..aa0859e6146f2 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java @@ -20,22 +20,20 @@ package org.elasticsearch.http.nio; import io.netty.handler.codec.http.HttpMethod; -import io.netty.handler.timeout.ReadTimeoutException; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.network.CloseableChannel; import 
org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.NetworkExceptionHelper; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; @@ -44,6 +42,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.AbstractHttpServerTransport; import org.elasticsearch.http.BindHttpException; +import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpStats; import org.elasticsearch.http.nio.cors.NioCorsConfig; @@ -115,7 +114,6 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { private final int tcpReceiveBufferSize; private final Set serverChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); - private final Set socketChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); private NioGroup nioGroup; private HttpChannelFactory channelFactory; private final NioCorsConfig corsConfig; @@ -156,7 +154,7 @@ protected void doStart() { int workerCount = NIO_HTTP_WORKER_COUNT.get(settings); nioGroup = new NioGroup(daemonThreadFactory(this.settings, HTTP_SERVER_ACCEPTOR_THREAD_NAME_PREFIX), acceptorCount, daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX), workerCount, - (s) -> new EventHandler(this::nonChannelExceptionCaught, s)); + (s) -> new EventHandler(this::onNonChannelException, s)); channelFactory = new HttpChannelFactory(); this.boundAddress = createBoundHttpAddress(); @@ -187,12 +185,13 @@ protected void doStop() { } } + // TODO: Move all of channel closing to abstract class once server channels are handled try { - closeChannels(new ArrayList<>(socketChannels)); + CloseableChannel.closeChannels(new ArrayList<>(httpChannels), true); } catch (Exception e) { logger.warn("unexpected exception while closing http channels", e); } - socketChannels.clear(); + httpChannels.clear(); try { nioGroup.close(); @@ -235,38 +234,7 @@ protected TransportAddress bindAddress(InetAddress hostAddress) { @Override public HttpStats stats() { - return new HttpStats(serverChannels.size(), socketChannels.size()); - } - - protected void exceptionCaught(NioSocketChannel channel, Exception cause) { - if (cause instanceof ReadTimeoutException) { - if (logger.isTraceEnabled()) { - logger.trace("Read timeout [{}]", channel.getRemoteAddress()); - } - channel.close(); - } else { - if (lifecycle.started() == false) { - // ignore - return; - } - if (NetworkExceptionHelper.isCloseConnectionException(cause) == false) { - logger.warn( - (Supplier) () -> new ParameterizedMessage( - "caught exception while handling client http traffic, closing connection {}", channel), - cause); - channel.close(); - } else { - logger.debug( - (Supplier) () -> new ParameterizedMessage( - "caught exception while handling client http traffic, closing connection {}", channel), - cause); - channel.close(); - } - } - } - - protected void nonChannelExceptionCaught(Exception ex) { - logger.warn(new ParameterizedMessage("exception caught on transport layer [thread={}]", Thread.currentThread().getName()), ex); + return new HttpStats(serverChannels.size(), totalChannelsAccepted.get()); } static NioCorsConfig buildCorsConfig(Settings settings) { @@ 
-324,7 +292,7 @@ private void closeChannels(List channels) { } private void acceptChannel(NioSocketChannel socketChannel) { - socketChannels.add(socketChannel); + super.serverAcceptedChannel((HttpChannel) socketChannel); } private class HttpChannelFactory extends ChannelFactory { @@ -342,7 +310,7 @@ public NioHttpChannel createChannel(NioSelector selector, SocketChannel channel) }; HttpReadWriteHandler httpReadWritePipeline = new HttpReadWriteHandler(nioChannel,NioHttpServerTransport.this, handlingSettings, corsConfig); - Consumer exceptionHandler = (e) -> exceptionCaught(nioChannel, e); + Consumer exceptionHandler = (e) -> onException(nioChannel, e); SocketChannelContext context = new BytesChannelContext(nioChannel, selector, exceptionHandler, httpReadWritePipeline, new InboundChannelBuffer(pageSupplier)); nioChannel.setContext(context); diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioSocketChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java similarity index 92% rename from plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioSocketChannel.java rename to plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java index ef2bc875aa994..d700ad567bc19 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioSocketChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java @@ -28,11 +28,11 @@ import java.net.StandardSocketOptions; import java.nio.channels.SocketChannel; -public class TcpNioSocketChannel extends NioSocketChannel implements TcpChannel { +public class NioTcpChannel extends NioSocketChannel implements TcpChannel { private final String profile; - public TcpNioSocketChannel(String profile, SocketChannel socketChannel) throws IOException { + public NioTcpChannel(String profile, SocketChannel socketChannel) throws IOException { super(socketChannel); this.profile = profile; } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioServerSocketChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java similarity index 92% rename from plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioServerSocketChannel.java rename to plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java index 946563225c66c..10bf4ed752321 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpNioServerSocketChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java @@ -32,11 +32,11 @@ * This is an implementation of {@link NioServerSocketChannel} that adheres to the {@link TcpChannel} * interface. As it is a server socket, setting SO_LINGER and sending messages is not supported. 
*/ -public class TcpNioServerSocketChannel extends NioServerSocketChannel implements TcpChannel { +public class NioTcpServerChannel extends NioServerSocketChannel implements TcpChannel { private final String profile; - public TcpNioServerSocketChannel(String profile, ServerSocketChannel socketChannel) throws IOException { + public NioTcpServerChannel(String profile, ServerSocketChannel socketChannel) throws IOException { super(socketChannel); this.profile = profile; } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java index b85d707dcd934..cf7d37493cb38 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java @@ -40,7 +40,6 @@ import org.elasticsearch.nio.NioSocketChannel; import org.elasticsearch.nio.ServerChannelContext; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TcpChannel; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transports; @@ -78,14 +77,14 @@ protected NioTransport(Settings settings, ThreadPool threadPool, NetworkService } @Override - protected TcpNioServerSocketChannel bind(String name, InetSocketAddress address) throws IOException { + protected NioTcpServerChannel bind(String name, InetSocketAddress address) throws IOException { TcpChannelFactory channelFactory = this.profileToChannelFactory.get(name); return nioGroup.bindServerChannel(address, channelFactory); } @Override - protected TcpNioSocketChannel initiateChannel(InetSocketAddress address, ActionListener connectListener) throws IOException { - TcpNioSocketChannel channel = nioGroup.openChannel(address, clientChannelFactory); + protected NioTcpChannel initiateChannel(InetSocketAddress address, ActionListener connectListener) throws IOException { + NioTcpChannel channel = nioGroup.openChannel(address, clientChannelFactory); channel.addConnectListener(ActionListener.toBiConsumer(connectListener)); return channel; } @@ -131,19 +130,15 @@ protected void stopInternal() { profileToChannelFactory.clear(); } - protected void exceptionCaught(NioSocketChannel channel, Exception exception) { - onException((TcpChannel) channel, exception); - } - protected void acceptChannel(NioSocketChannel channel) { - serverAcceptedChannel((TcpNioSocketChannel) channel); + serverAcceptedChannel((NioTcpChannel) channel); } protected TcpChannelFactory channelFactory(ProfileSettings settings, boolean isClient) { return new TcpChannelFactoryImpl(settings); } - protected abstract class TcpChannelFactory extends ChannelFactory { + protected abstract class TcpChannelFactory extends ChannelFactory { protected TcpChannelFactory(RawChannelFactory rawChannelFactory) { super(rawChannelFactory); @@ -164,14 +159,14 @@ private TcpChannelFactoryImpl(ProfileSettings profileSettings) { } @Override - public TcpNioSocketChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { - TcpNioSocketChannel nioChannel = new TcpNioSocketChannel(profileName, channel); + public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { + NioTcpChannel nioChannel = new NioTcpChannel(profileName, channel); Supplier pageSupplier = () -> { Recycler.V bytes = pageCacheRecycler.bytePage(false); return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close); }; 
TcpReadWriteHandler readWriteHandler = new TcpReadWriteHandler(nioChannel, NioTransport.this); - Consumer exceptionHandler = (e) -> exceptionCaught(nioChannel, e); + Consumer exceptionHandler = (e) -> onException(nioChannel, e); BytesChannelContext context = new BytesChannelContext(nioChannel, selector, exceptionHandler, readWriteHandler, new InboundChannelBuffer(pageSupplier)); nioChannel.setContext(context); @@ -179,8 +174,8 @@ public TcpNioSocketChannel createChannel(NioSelector selector, SocketChannel cha } @Override - public TcpNioServerSocketChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { - TcpNioServerSocketChannel nioChannel = new TcpNioServerSocketChannel(profileName, channel); + public NioTcpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { + NioTcpServerChannel nioChannel = new NioTcpServerChannel(profileName, channel); Consumer exceptionHandler = (e) -> logger.error(() -> new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); Consumer acceptor = NioTransport.this::acceptChannel; diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpReadWriteHandler.java index f2d07b180855c..e86653b685820 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpReadWriteHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/TcpReadWriteHandler.java @@ -28,10 +28,10 @@ public class TcpReadWriteHandler extends BytesWriteHandler { - private final TcpNioSocketChannel channel; + private final NioTcpChannel channel; private final TcpTransport transport; - public TcpReadWriteHandler(TcpNioSocketChannel channel, TcpTransport transport) { + public TcpReadWriteHandler(NioTcpChannel channel, TcpTransport transport) { this.channel = channel; this.transport = transport; } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java index c78ae25e44a06..090fc579c4899 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -96,7 +97,7 @@ protected MockTransportService build(Settings settings, Version version, Cluster protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - TcpChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); + CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } public void testConnectException() throws UnknownHostException { diff --git 
a/server/src/main/java/org/elasticsearch/common/network/CloseableChannel.java b/server/src/main/java/org/elasticsearch/common/network/CloseableChannel.java new file mode 100644 index 0000000000000..6b89a90aa2c77 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/network/CloseableChannel.java @@ -0,0 +1,118 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.network; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.internal.io.IOUtils; + +import java.io.Closeable; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.ExecutionException; + +public interface CloseableChannel extends Closeable { + + /** + * Closes the channel. For most implementations, this will be be an asynchronous process. For this + * reason, this method does not throw {@link java.io.IOException} There is no guarantee that the channel + * will be closed when this method returns. Use the {@link #addCloseListener(ActionListener)} method + * to implement logic that depends on knowing when the channel is closed. + */ + @Override + void close(); + + /** + * Adds a listener that will be executed when the channel is closed. If the channel is still open when + * this listener is added, the listener will be executed by the thread that eventually closes the + * channel. If the channel is already closed when the listener is added the listener will immediately be + * executed by the thread that is attempting to add the listener. + * + * @param listener to be executed + */ + void addCloseListener(ActionListener listener); + + /** + * Indicates whether a channel is currently open + * + * @return boolean indicating if channel is open + */ + boolean isOpen(); + + /** + * Closes the channel without blocking. + * + * @param channel to close + */ + static void closeChannel(C channel) { + closeChannel(channel, false); + } + + /** + * Closes the channel. + * + * @param channel to close + * @param blocking indicates if we should block on channel close + */ + static void closeChannel(C channel, boolean blocking) { + closeChannels(Collections.singletonList(channel), blocking); + } + + /** + * Closes the channels. + * + * @param channels to close + * @param blocking indicates if we should block on channel close + */ + static void closeChannels(List channels, boolean blocking) { + try { + IOUtils.close(channels); + } catch (IOException e) { + // The CloseableChannel#close method does not throw IOException, so this should not occur. 
+ throw new UncheckedIOException(e); + } + if (blocking) { + ArrayList> futures = new ArrayList<>(channels.size()); + for (final C channel : channels) { + PlainActionFuture closeFuture = PlainActionFuture.newFuture(); + channel.addCloseListener(closeFuture); + futures.add(closeFuture); + } + blockOnFutures(futures); + } + } + + static void blockOnFutures(List> futures) { + for (ActionFuture future : futures) { + try { + future.get(); + } catch (ExecutionException e) { + // Ignore as we are only interested in waiting for the close process to complete. Logging + // close exceptions happens elsewhere. + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 4fad4159f55d8..9d9008f7fb879 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -21,12 +21,16 @@ import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.IntSet; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; +import org.elasticsearch.common.transport.NetworkExceptionHelper; import org.elasticsearch.common.transport.PortsRange; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; @@ -41,9 +45,14 @@ import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; +import java.nio.channels.CancelledKeyException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_BIND_HOST; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH; @@ -60,11 +69,13 @@ public abstract class AbstractHttpServerTransport extends AbstractLifecycleCompo protected final Dispatcher dispatcher; private final NamedXContentRegistry xContentRegistry; - protected final String[] bindHosts; - protected final String[] publishHosts; protected final PortsRange port; protected final ByteSizeValue maxContentLength; + private final String[] bindHosts; + private final String[] publishHosts; + protected final AtomicLong totalChannelsAccepted = new AtomicLong(); + protected final Set httpChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); protected volatile BoundTransportAddress boundAddress; protected AbstractHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, @@ -166,6 +177,49 @@ static int resolvePublishPort(Settings settings, List boundAdd return publishPort; } + protected void onException(HttpChannel channel, Exception e) { + if (lifecycle.started() == false) { + // just close and ignore - we are already stopped and just need to make sure we release all resources + 
CloseableChannel.closeChannel(channel); + return; + } + if (NetworkExceptionHelper.isCloseConnectionException(e)) { + logger.trace(() -> new ParameterizedMessage( + "close connection exception caught while handling client http traffic, closing connection {}", channel), e); + CloseableChannel.closeChannel(channel); + } else if (NetworkExceptionHelper.isConnectException(e)) { + logger.trace(() -> new ParameterizedMessage( + "connect exception caught while handling client http traffic, closing connection {}", channel), e); + CloseableChannel.closeChannel(channel); + } else if (e instanceof CancelledKeyException) { + logger.trace(() -> new ParameterizedMessage( + "cancelled key exception caught while handling client http traffic, closing connection {}", channel), e); + CloseableChannel.closeChannel(channel); + } else { + logger.warn(() -> new ParameterizedMessage( + "caught exception while handling client http traffic, closing connection {}", channel), e); + CloseableChannel.closeChannel(channel); + } + } + + /** + * Exception handler for exceptions that are not associated with a specific channel. + * + * @param exception the exception + */ + protected void onNonChannelException(Exception exception) { + logger.warn(new ParameterizedMessage("exception caught on transport layer [thread={}]", Thread.currentThread().getName()), + exception); + } + + protected void serverAcceptedChannel(HttpChannel httpChannel) { + boolean addedOnThisCall = httpChannels.add(httpChannel); + assert addedOnThisCall : "Channel should only be added to http channel set once"; + totalChannelsAccepted.incrementAndGet(); + httpChannel.addCloseListener(ActionListener.wrap(() -> httpChannels.remove(httpChannel))); + logger.trace(() -> new ParameterizedMessage("Http channel accepted: {}", httpChannel)); + } + /** * This method handles an incoming http request. 
* @@ -181,7 +235,7 @@ public void incomingRequest(final HttpRequest httpRequest, final HttpChannel htt * * @param httpRequest that is incoming * @param httpChannel that received the http request - * @param exception that was encountered + * @param exception that was encountered */ public void incomingRequestError(final HttpRequest httpRequest, final HttpChannel httpChannel, final Exception exception) { handleIncomingRequest(httpRequest, httpChannel, exception); @@ -219,7 +273,7 @@ private void handleIncomingRequest(final HttpRequest httpRequest, final HttpChan innerRestRequest = requestWithoutContentTypeHeader(httpRequest, httpChannel, badRequestCause); } catch (final RestRequest.BadParameterException e) { badRequestCause = ExceptionsHelper.useOrSuppress(badRequestCause, e); - innerRestRequest = RestRequest.requestWithoutParameters(xContentRegistry, httpRequest, httpChannel); + innerRestRequest = RestRequest.requestWithoutParameters(xContentRegistry, httpRequest, httpChannel); } restRequest = innerRestRequest; } diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index f5924bb239eae..38bf1e751ef9d 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.AbstractRestChannel; @@ -114,7 +115,7 @@ public void sendResponse(RestResponse restResponse) { } if (isCloseConnection()) { - toClose.add(httpChannel); + toClose.add(() -> CloseableChannel.closeChannel(httpChannel)); } ActionListener listener = ActionListener.wrap(() -> Releasables.close(toClose)); diff --git a/server/src/main/java/org/elasticsearch/http/HttpChannel.java b/server/src/main/java/org/elasticsearch/http/HttpChannel.java index baea3e0c3b3c3..ea8d3c276b16d 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpChannel.java +++ b/server/src/main/java/org/elasticsearch/http/HttpChannel.java @@ -20,11 +20,11 @@ package org.elasticsearch.http; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.network.CloseableChannel; import java.net.InetSocketAddress; -public interface HttpChannel extends Releasable { +public interface HttpChannel extends CloseableChannel { /** * Sends a http response to the channel. The listener will be executed once the send process has been @@ -49,10 +49,4 @@ public interface HttpChannel extends Releasable { */ InetSocketAddress getRemoteAddress(); - /** - * Closes the channel. This might be an asynchronous process. There is no guarantee that the channel - * will be closed when this method returns. 
- */ - void close(); - } diff --git a/server/src/main/java/org/elasticsearch/http/HttpStats.java b/server/src/main/java/org/elasticsearch/http/HttpStats.java index ac7f0d69485fe..4809315ce1810 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpStats.java +++ b/server/src/main/java/org/elasticsearch/http/HttpStats.java @@ -32,9 +32,9 @@ public class HttpStats implements Writeable, ToXContentFragment { private final long serverOpen; private final long totalOpen; - public HttpStats(long serverOpen, long totalOpen) { + public HttpStats(long serverOpen, long totalOpened) { this.serverOpen = serverOpen; - this.totalOpen = totalOpen; + this.totalOpen = totalOpened; } public HttpStats(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/transport/TcpChannel.java b/server/src/main/java/org/elasticsearch/transport/TcpChannel.java index 1a022ee9f4856..bc5cc2c92f2cb 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpChannel.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpChannel.java @@ -21,17 +21,13 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; import java.net.InetSocketAddress; -import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -43,30 +39,13 @@ * abstraction used by the {@link TcpTransport} and {@link TransportService}. All tcp transport * implementations must return channels that adhere to the required method contracts. */ -public interface TcpChannel extends Releasable { - - /** - * Closes the channel. This might be an asynchronous process. There is no guarantee that the channel - * will be closed when this method returns. Use the {@link #addCloseListener(ActionListener)} method - * to implement logic that depends on knowing when the channel is closed. - */ - void close(); +public interface TcpChannel extends CloseableChannel { /** * This returns the profile for this channel. */ String getProfile(); - /** - * Adds a listener that will be executed when the channel is closed. If the channel is still open when - * this listener is added, the listener will be executed by the thread that eventually closes the - * channel. If the channel is already closed when the listener is added the listener will immediately be - * executed by the thread that is attempting to add the listener. - * - * @param listener to be executed - */ - void addCloseListener(ActionListener listener); - /** * This sets the low level socket option {@link java.net.StandardSocketOptions} SO_LINGER on a channel. @@ -77,13 +56,6 @@ public interface TcpChannel extends Releasable { void setSoLinger(int value) throws IOException; - /** - * Indicates whether a channel is currently open - * - * @return boolean indicating if channel is open - */ - boolean isOpen(); - /** * Returns the local address for this channel. * @@ -107,48 +79,6 @@ public interface TcpChannel extends Releasable { */ void sendMessage(BytesReference reference, ActionListener listener); - /** - * Closes the channel without blocking. 
- * - * @param channel to close - */ - static void closeChannel(C channel) { - closeChannel(channel, false); - } - - /** - * Closes the channel. - * - * @param channel to close - * @param blocking indicates if we should block on channel close - */ - static void closeChannel(C channel, boolean blocking) { - closeChannels(Collections.singletonList(channel), blocking); - } - - /** - * Closes the channels. - * - * @param channels to close - * @param blocking indicates if we should block on channel close - */ - static void closeChannels(List channels, boolean blocking) { - if (blocking) { - ArrayList> futures = new ArrayList<>(channels.size()); - for (final C channel : channels) { - if (channel.isOpen()) { - PlainActionFuture closeFuture = PlainActionFuture.newFuture(); - channel.addCloseListener(closeFuture); - channel.close(); - futures.add(closeFuture); - } - } - blockOnFutures(futures); - } else { - Releasables.close(channels); - } - } - /** * Awaits for all of the pending connections to complete. Will throw an exception if at least one of the * connections fails. @@ -188,17 +118,4 @@ static void awaitConnected(DiscoveryNode discoveryNode, List> } } - static void blockOnFutures(List> futures) { - for (ActionFuture future : futures) { - try { - future.get(); - } catch (ExecutionException e) { - // Ignore as we are only interested in waiting for the close process to complete. Logging - // close exceptions happens elsewhere. - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new IllegalStateException("Future got interrupted", e); - } - } - } } diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index c577fae486744..bd862c19e9c6d 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -22,6 +22,7 @@ import com.carrotsearch.hppc.IntSet; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -479,7 +480,7 @@ public void close() { } boolean block = lifecycle.stopped() && Transports.isTransportThread(Thread.currentThread()) == false; - TcpChannel.closeChannels(channels, block); + CloseableChannel.closeChannels(channels, block); } finally { transportService.onConnectionClosed(this); } @@ -623,7 +624,7 @@ public final NodeChannels openConnection(DiscoveryNode node, ConnectionProfile c channels.add(channel); } catch (Exception e) { // If there was an exception when attempting to instantiate the raw channels, we close all of the channels - TcpChannel.closeChannels(channels, false); + CloseableChannel.closeChannels(channels, false); throw e; } } @@ -632,7 +633,7 @@ public final NodeChannels openConnection(DiscoveryNode node, ConnectionProfile c try { TcpChannel.awaitConnected(node, connectionFutures, connectionProfile.getConnectTimeout()); } catch (Exception ex) { - TcpChannel.closeChannels(channels, false); + CloseableChannel.closeChannels(channels, false); throw ex; } @@ -643,7 +644,7 @@ public final NodeChannels openConnection(DiscoveryNode node, ConnectionProfile c try { version = executeHandshake(node, handshakeChannel, connectionProfile.getHandshakeTimeout()); } catch (Exception ex) { - TcpChannel.closeChannels(channels, false); + 
CloseableChannel.closeChannels(channels, false); throw ex; } @@ -962,12 +963,12 @@ protected final void doStop() { ActionListener closeFailLogger = ActionListener.wrap(c -> {}, e -> logger.warn(() -> new ParameterizedMessage("Error closing serverChannel for profile [{}]", profile), e)); channels.forEach(c -> c.addCloseListener(closeFailLogger)); - TcpChannel.closeChannels(channels, true); + CloseableChannel.closeChannels(channels, true); } serverChannels.clear(); // close all of the incoming channels. The closeChannels method takes a list so we must convert the set. - TcpChannel.closeChannels(new ArrayList<>(acceptedChannels), true); + CloseableChannel.closeChannels(new ArrayList<>(acceptedChannels), true); acceptedChannels.clear(); @@ -1001,7 +1002,7 @@ protected final void doStop() { protected void onException(TcpChannel channel, Exception e) { if (!lifecycle.started()) { // just close and ignore - we are already stopped and just need to make sure we release all resources - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); return; } @@ -1009,20 +1010,20 @@ protected void onException(TcpChannel channel, Exception e) { logger.trace(() -> new ParameterizedMessage( "close connection exception caught on transport layer [{}], disconnecting from relevant node", channel), e); // close the channel, which will cause a node to be disconnected if relevant - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (isConnectException(e)) { logger.trace(() -> new ParameterizedMessage("connect exception caught on transport layer [{}]", channel), e); // close the channel as safe measure, which will cause a node to be disconnected if relevant - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (e instanceof BindException) { logger.trace(() -> new ParameterizedMessage("bind exception caught on transport layer [{}]", channel), e); // close the channel as safe measure, which will cause a node to be disconnected if relevant - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (e instanceof CancelledKeyException) { logger.trace(() -> new ParameterizedMessage( "cancelled key exception caught on transport layer [{}], disconnecting from relevant node", channel), e); // close the channel as safe measure, which will cause a node to be disconnected if relevant - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (e instanceof TcpTransport.HttpOnTransportException) { // in case we are able to return data, serialize the exception content and sent it back to the client if (channel.isOpen()) { @@ -1030,13 +1031,13 @@ protected void onException(TcpChannel channel, Exception e) { final SendMetricListener closeChannel = new SendMetricListener(message.length()) { @Override protected void innerInnerOnResponse(Void v) { - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } @Override protected void innerOnFailure(Exception e) { logger.debug("failed to send message to httpOnTransport channel", e); - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } }; internalSendMessage(channel, message, closeChannel); @@ -1044,7 +1045,7 @@ protected void innerOnFailure(Exception e) { } else { logger.warn(() -> new ParameterizedMessage("exception caught on transport layer [{}], closing connection", channel), e); // close the channel, which will cause a node to be disconnected if relevant - TcpChannel.closeChannel(channel); + 
CloseableChannel.closeChannel(channel); } } @@ -1060,7 +1061,7 @@ protected void onNonChannelException(Exception exception) { protected void serverAcceptedChannel(TcpChannel channel) { boolean addedOnThisCall = acceptedChannels.add(channel); - assert addedOnThisCall : "Channel should only be added to accept channel set once"; + assert addedOnThisCall : "Channel should only be added to accepted channel set once"; channel.addCloseListener(ActionListener.wrap(() -> acceptedChannels.remove(channel))); logger.trace(() -> new ParameterizedMessage("Tcp transport channel accepted: {}", channel)); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index 4d4743156c73d..2aec495390b6c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -143,6 +143,16 @@ public InetSocketAddress getRemoteAddress() { return remoteAddress; } + @Override + public void addCloseListener(ActionListener listener) { + + } + + @Override + public boolean isOpen() { + return true; + } + @Override public void close() { diff --git a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java index e9f5f86462f54..84c82f4159dc6 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -65,7 +66,7 @@ protected void closeConnectionChannel(Transport transport, Transport.Connection final MockTcpTransport t = (MockTcpTransport) transport; @SuppressWarnings("unchecked") final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - TcpChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); + CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } } diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java index bd7fddf82b858..cf9eb5d7a8c57 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -97,7 +98,7 @@ protected MockTransportService build(Settings settings, Version version, Cluster protected void closeConnectionChannel(Transport 
transport, Transport.Connection connection) throws IOException { @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - TcpChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); + CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } public void testConnectException() throws UnknownHostException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java index d897d55e5fdc4..ce06712722cd1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java @@ -13,6 +13,7 @@ import io.netty.handler.ssl.SslHandler; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; @@ -111,7 +112,7 @@ protected ChannelHandler getClientChannelInitializer() { protected void onException(TcpChannel channel, Exception e) { if (!lifecycle.started()) { // just close and ignore - we are already stopped and just need to make sure we release all resources - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (SSLExceptionHelper.isNotSslRecordException(e)) { if (logger.isTraceEnabled()) { logger.trace( @@ -119,21 +120,21 @@ protected void onException(TcpChannel channel, Exception e) { } else { logger.warn("received plaintext traffic on an encrypted channel, closing connection {}", channel); } - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (SSLExceptionHelper.isCloseDuringHandshakeException(e)) { if (logger.isTraceEnabled()) { logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e); } else { logger.warn("connection {} closed during handshake", channel); } - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else if (SSLExceptionHelper.isReceivedCertificateUnknownException(e)) { if (logger.isTraceEnabled()) { logger.trace(new ParameterizedMessage("client did not trust server's certificate, closing connection {}", channel), e); } else { logger.warn("client did not trust this server's certificate, closing connection {}", channel); } - TcpChannel.closeChannel(channel); + CloseableChannel.closeChannel(channel); } else { super.onException(channel, e); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/http/netty4/Netty4HttpMockUtil.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/http/netty4/Netty4HttpMockUtil.java deleted file mode 100644 index 87e3e78cbc4a3..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/http/netty4/Netty4HttpMockUtil.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.http.netty4; - -import org.elasticsearch.transport.netty4.Netty4OpenChannelsHandler; - -import static org.mockito.Mockito.mock; - -/** Allows setting a mock into Netty3HttpServerTransport */ -public class Netty4HttpMockUtil { - - /** - * We don't really need to start Netty for these tests, but we can't create a pipeline - * with a null handler. So we set it to a mock for tests. - */ - public static void setOpenChannelsHandlerToMock(Netty4HttpServerTransport transport) throws Exception { - transport.serverOpenChannels = mock(Netty4OpenChannelsHandler.class); - } - -} \ No newline at end of file diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java index ac586c4945794..9667ca675b4c1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java @@ -7,16 +7,16 @@ import io.netty.channel.Channel; import io.netty.channel.ChannelHandler; -import io.netty.channel.ChannelHandlerContext; import io.netty.handler.ssl.SslHandler; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.netty4.Netty4HttpServerTransport; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.netty4.Netty4Utils; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.transport.filter.IPFilter; @@ -57,37 +57,36 @@ public SecurityNetty4HttpServerTransport(Settings settings, NetworkService netwo } @Override - protected void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { - Netty4Utils.maybeDie(cause); + protected void onException(HttpChannel channel, Exception e) { if (!lifecycle.started()) { return; } - if (isNotSslRecordException(cause)) { + if (isNotSslRecordException(e)) { if (logger.isTraceEnabled()) { logger.trace(new ParameterizedMessage("received plaintext http traffic on a https channel, closing connection {}", - ctx.channel()), cause); + channel), e); } else { - logger.warn("received plaintext http traffic on a https channel, closing connection {}", ctx.channel()); + logger.warn("received plaintext http traffic on a https channel, closing connection {}", channel); } - ctx.channel().close(); - } else if (isCloseDuringHandshakeException(cause)) { + CloseableChannel.closeChannel(channel); + } else if (isCloseDuringHandshakeException(e)) { if (logger.isTraceEnabled()) { - logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", ctx.channel()), cause); + logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e); } else { - logger.warn("connection {} closed during ssl handshake", ctx.channel()); + 
logger.warn("connection {} closed during ssl handshake", channel); } - ctx.channel().close(); - } else if (isReceivedCertificateUnknownException(cause)) { + CloseableChannel.closeChannel(channel); + } else if (isReceivedCertificateUnknownException(e)) { if (logger.isTraceEnabled()) { logger.trace(new ParameterizedMessage("http client did not trust server's certificate, closing connection {}", - ctx.channel()), cause); + channel), e); } else { - logger.warn("http client did not trust this server's certificate, closing connection {}", ctx.channel()); + logger.warn("http client did not trust this server's certificate, closing connection {}", channel); } - ctx.channel().close(); + CloseableChannel.closeChannel(channel); } else { - super.exceptionCaught(ctx, cause); + super.onException(channel, e); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java index 39ce1a0150c4f..5315a944f778d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java @@ -14,14 +14,14 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.nio.InboundChannelBuffer; +import org.elasticsearch.nio.NioSelector; import org.elasticsearch.nio.NioSocketChannel; import org.elasticsearch.nio.ServerChannelContext; -import org.elasticsearch.nio.NioSelector; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.nio.NioTcpChannel; +import org.elasticsearch.transport.nio.NioTcpServerChannel; import org.elasticsearch.transport.nio.NioTransport; -import org.elasticsearch.transport.nio.TcpNioServerSocketChannel; -import org.elasticsearch.transport.nio.TcpNioSocketChannel; import org.elasticsearch.transport.nio.TcpReadWriteHandler; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.transport.netty4.SecurityNetty4Transport; @@ -95,11 +95,6 @@ protected void acceptChannel(NioSocketChannel channel) { super.acceptChannel(channel); } - @Override - protected void exceptionCaught(NioSocketChannel channel, Exception exception) { - super.exceptionCaught(channel, exception); - } - private class SecurityTcpChannelFactory extends TcpChannelFactory { private final String profileName; @@ -116,11 +111,11 @@ private SecurityTcpChannelFactory(ProfileSettings profileSettings, boolean isCli } @Override - public TcpNioSocketChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { + public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { SSLConfiguration defaultConfig = profileConfiguration.get(TcpTransport.DEFAULT_PROFILE); SSLEngine sslEngine = sslService.createSSLEngine(profileConfiguration.getOrDefault(profileName, defaultConfig), null, -1); SSLDriver sslDriver = new SSLDriver(sslEngine, isClient); - TcpNioSocketChannel nioChannel = new TcpNioSocketChannel(profileName, channel); + NioTcpChannel nioChannel = new NioTcpChannel(profileName, channel); Supplier pageSupplier = () -> { Recycler.V bytes = pageCacheRecycler.bytePage(false); return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close); 
@@ -128,15 +123,15 @@ public TcpNioSocketChannel createChannel(NioSelector selector, SocketChannel cha TcpReadWriteHandler readWriteHandler = new TcpReadWriteHandler(nioChannel, SecurityNioTransport.this); InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier); - Consumer exceptionHandler = (e) -> exceptionCaught(nioChannel, e); + Consumer exceptionHandler = (e) -> onException(nioChannel, e); SSLChannelContext context = new SSLChannelContext(nioChannel, selector, exceptionHandler, sslDriver, readWriteHandler, buffer); nioChannel.setContext(context); return nioChannel; } @Override - public TcpNioServerSocketChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { - TcpNioServerSocketChannel nioChannel = new TcpNioServerSocketChannel(profileName, channel); + public NioTcpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { + NioTcpServerChannel nioChannel = new NioTcpServerChannel(profileName, channel); Consumer exceptionHandler = (e) -> logger.error(() -> new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); Consumer acceptor = SecurityNioTransport.this::acceptChannel; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index 3ef298f3f232d..ec925f43abe79 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.http.NullDispatcher; -import org.elasticsearch.http.netty4.Netty4HttpMockUtil; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackSettings; @@ -26,7 +25,6 @@ import org.junit.Before; import javax.net.ssl.SSLEngine; - import java.nio.file.Path; import java.util.Collections; import java.util.Locale; @@ -65,7 +63,6 @@ public void testDefaultClientAuth() throws Exception { SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -82,7 +79,6 @@ public void testOptionalClientAuth() throws Exception { SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); 
assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -99,7 +95,6 @@ public void testRequiredClientAuth() throws Exception { SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true)); @@ -116,7 +111,6 @@ public void testNoClientAuth() throws Exception { SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -131,7 +125,6 @@ public void testCustomSSLConfiguration() throws Exception { SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); ChannelHandler handler = transport.configureServerChannelHandler(); EmbeddedChannel ch = new EmbeddedChannel(handler); SSLEngine defaultEngine = ch.pipeline().get(SslHandler.class).engine(); @@ -144,7 +137,6 @@ public void testCustomSSLConfiguration() throws Exception { sslService = new SSLService(settings, TestEnvironment.newEnvironment(settings)); transport = new SecurityNetty4HttpServerTransport(settings, new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(IPFilter.class), sslService, mock(ThreadPool.class), xContentRegistry(), new NullDispatcher()); - Netty4HttpMockUtil.setOpenChannelsHandlerToMock(transport); handler = transport.configureServerChannelHandler(); ch = new EmbeddedChannel(handler); SSLEngine customEngine = ch.pipeline().get(SslHandler.class).engine(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index 0a7ee13b9e296..c5a6a525d4e10 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.MockSecureSettings; @@ -118,7 +119,7 @@ protected 
MockTransportService build(Settings settings, Version version, Cluster protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; - TcpChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); + CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } public void testConnectException() throws UnknownHostException { From dc57eece75334b4fb2875082790eda3cf5e5088d Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Tue, 19 Jun 2018 11:58:34 -0600 Subject: [PATCH 37/92] Security: fix joining cluster with production license (#31341) The changes made to disable security for trial licenses unless security is explicitly enabled caused issues when a 6.3 node attempts to join a cluster that already has a production license installed. The new node starts off with a trial license and `xpack.security.enabled` is not set for the node, which causes the security code to skip attaching the user to the request. The existing cluster has security enabled and the lack of a user attached to the requests causes the request to be rejected. This commit changes the security code to check if the state has been recovered yet when making the decision on whether or not to attach a user. If the state has not yet been recovered, the code will attach the user to the request in case security is enabled on the cluster being joined. Closes #31332 --- .../license/XPackLicenseState.java | 6 +- .../license/XPackLicenseStateTests.java | 10 +++ .../xpack/security/Security.java | 2 +- .../SecurityServerTransportInterceptor.java | 33 +++++--- .../elasticsearch/license/LicensingTests.java | 48 ++++++++++++ ...curityServerTransportInterceptorTests.java | 78 ++++++++++++++++--- 6 files changed, 156 insertions(+), 21 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 2e4caff1a725d..e58c5eda06316 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -254,7 +254,11 @@ private static class Status { public XPackLicenseState(Settings settings) { this.isSecurityEnabled = XPackSettings.SECURITY_ENABLED.get(settings); - this.isSecurityExplicitlyEnabled = settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) && isSecurityEnabled; + // 6.0+ requires TLS for production licenses, so if TLS is enabled and security is enabled + // we can interpret this as an explicit enabling of security if the security enabled + // setting is not explicitly set + this.isSecurityExplicitlyEnabled = isSecurityEnabled && + (settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) || XPackSettings.TRANSPORT_SSL_ENABLED.get(settings)); } /** Updates the current state of the license, which will change what features are available. 
*/ diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index 335932df770e8..f1503919570e6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -79,6 +79,16 @@ public void testSecurityDefaults() { assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.ALL)); assertThat(licenseState.isCustomRoleProvidersAllowed(), is(true)); + licenseState = + new XPackLicenseState(Settings.builder().put(XPackSettings.TRANSPORT_SSL_ENABLED.getKey(), true).build()); + assertThat(licenseState.isAuthAllowed(), is(true)); + assertThat(licenseState.isIpFilteringAllowed(), is(true)); + assertThat(licenseState.isAuditingAllowed(), is(true)); + assertThat(licenseState.isStatsAndHealthAllowed(), is(true)); + assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(true)); + assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.ALL)); + assertThat(licenseState.isCustomRoleProvidersAllowed(), is(true)); + licenseState = new XPackLicenseState(Settings.EMPTY); assertThat(licenseState.isAuthAllowed(), is(true)); assertThat(licenseState.isIpFilteringAllowed(), is(true)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index c0bd7882c419a..dbb50a92f1088 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -472,7 +472,7 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste components.add(ipFilter.get()); DestructiveOperations destructiveOperations = new DestructiveOperations(settings, clusterService.getClusterSettings()); securityInterceptor.set(new SecurityServerTransportInterceptor(settings, threadPool, authcService.get(), - authzService, getLicenseState(), getSslService(), securityContext.get(), destructiveOperations)); + authzService, getLicenseState(), getSslService(), securityContext.get(), destructiveOperations, clusterService)); final Set requestInterceptors; if (XPackSettings.DLS_FLS_ENABLED.get(settings)) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java index 785425ade9bd6..7de3e5d0980d6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java @@ -9,12 +9,14 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.DestructiveOperations; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import 
org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -72,6 +74,8 @@ public class SecurityServerTransportInterceptor extends AbstractComponent implem private final SecurityContext securityContext; private final boolean reservedRealmEnabled; + private volatile boolean isStateNotRecovered = true; + public SecurityServerTransportInterceptor(Settings settings, ThreadPool threadPool, AuthenticationService authcService, @@ -79,7 +83,8 @@ public SecurityServerTransportInterceptor(Settings settings, XPackLicenseState licenseState, SSLService sslService, SecurityContext securityContext, - DestructiveOperations destructiveOperations) { + DestructiveOperations destructiveOperations, + ClusterService clusterService) { super(settings); this.settings = settings; this.threadPool = threadPool; @@ -90,6 +95,7 @@ public SecurityServerTransportInterceptor(Settings settings, this.securityContext = securityContext; this.profileFilters = initializeProfileFilters(destructiveOperations); this.reservedRealmEnabled = XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings); + clusterService.addListener(e -> isStateNotRecovered = e.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)); } @Override @@ -98,7 +104,13 @@ public AsyncSender interceptSender(AsyncSender sender) { @Override public void sendRequest(Transport.Connection connection, String action, TransportRequest request, TransportRequestOptions options, TransportResponseHandler handler) { - if (licenseState.isSecurityEnabled() && licenseState.isAuthAllowed()) { + // make a local copy of isStateNotRecovered as this is a volatile variable and it + // is used multiple times in the method. 
The copy to a local variable allows us to + // guarantee we use the same value wherever we would check the value for the state + // being recovered + final boolean stateNotRecovered = isStateNotRecovered; + final boolean sendWithAuth = (licenseState.isSecurityEnabled() && licenseState.isAuthAllowed()) || stateNotRecovered; + if (sendWithAuth) { // the transport in core normally does this check, BUT since we are serializing to a string header we need to do it // ourselves otherwise we wind up using a version newer than what we can actually send final Version minVersion = Version.min(connection.getVersion(), Version.CURRENT); @@ -108,20 +120,20 @@ public void sendRequest(Transport.Connection conne if (AuthorizationUtils.shouldReplaceUserWithSystem(threadPool.getThreadContext(), action)) { securityContext.executeAsUser(SystemUser.INSTANCE, (original) -> sendWithUser(connection, action, request, options, new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original) - , handler), sender), minVersion); + , handler), sender, stateNotRecovered), minVersion); } else if (AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadPool.getThreadContext())) { AuthorizationUtils.switchUserBasedOnActionOriginAndExecute(threadPool.getThreadContext(), securityContext, (original) -> sendWithUser(connection, action, request, options, new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original) - , handler), sender)); + , handler), sender, stateNotRecovered)); } else if (securityContext.getAuthentication() != null && securityContext.getAuthentication().getVersion().equals(minVersion) == false) { // re-write the authentication since we want the authentication version to match the version of the connection securityContext.executeAfterRewritingAuthentication(original -> sendWithUser(connection, action, request, options, - new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original), handler), sender), - minVersion); + new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original), handler), sender, + stateNotRecovered), minVersion); } else { - sendWithUser(connection, action, request, options, handler, sender); + sendWithUser(connection, action, request, options, handler, sender, stateNotRecovered); } } else { sender.sendRequest(connection, action, request, options, handler); @@ -132,9 +144,10 @@ public void sendRequest(Transport.Connection conne private void sendWithUser(Transport.Connection connection, String action, TransportRequest request, TransportRequestOptions options, TransportResponseHandler handler, - AsyncSender sender) { - // There cannot be a request outgoing from this node that is not associated with a user. 
- if (securityContext.getAuthentication() == null) { + AsyncSender sender, final boolean stateNotRecovered) { + // There cannot be a request outgoing from this node that is not associated with a user + // unless we do not know the actual license of the cluster + if (securityContext.getAuthentication() == null && stateNotRecovered == false) { // we use an assertion here to ensure we catch this in our testing infrastructure, but leave the ISE for cases we do not catch // in tests and may be hit by a user assertNoAuthentication(action); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java index 7fe510f3b4e39..2297a5353b6e0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java @@ -23,11 +23,16 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.node.MockNode; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.MockHttpTransport; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.transport.Netty4Plugin; import org.elasticsearch.transport.Transport; @@ -41,7 +46,10 @@ import org.junit.After; import org.junit.Before; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -115,6 +123,18 @@ protected Collection> nodePlugins() { return plugins; } + @Override + protected int maxNumberOfNodes() { + return super.maxNumberOfNodes() + 1; + } + + @Override + public Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false) + .build(); + } + @Before public void resetLicensing() { enableLicensing(); @@ -250,6 +270,34 @@ public void testTransportClientAuthenticationByLicenseType() throws Exception { } } + public void testNodeJoinWithoutSecurityExplicitlyEnabled() throws Exception { + License.OperationMode mode = randomFrom(License.OperationMode.GOLD, License.OperationMode.PLATINUM, License.OperationMode.STANDARD); + enableLicensing(mode); + ensureGreen(); + + Path home = createTempDir(); + Path conf = home.resolve("config"); + Files.createDirectories(conf); + Settings nodeSettings = Settings.builder() + .put(nodeSettings(maxNumberOfNodes() - 1).filter(s -> "xpack.security.enabled".equals(s) == false)) + .put("node.name", "my-test-node") + .put("network.host", "localhost") + .put("cluster.name", internalCluster().getClusterName()) + .put("discovery.zen.minimum_master_nodes", + internalCluster().getInstance(Settings.class).get("discovery.zen.minimum_master_nodes")) + .put("path.home", home) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false) + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "test-zen") + 
.put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "test-zen") + .build(); + Collection> mockPlugins = Arrays.asList(LocalStateSecurity.class, TestZenDiscovery.TestPlugin.class, + MockHttpTransport.TestPlugin.class); + try (Node node = new MockNode(nodeSettings, mockPlugins)) { + node.start(); + ensureStableCluster(cluster().size() + 1); + } + } + private static void assertElasticsearchSecurityException(ThrowingRunnable runnable) { ElasticsearchSecurityException ee = expectThrows(ElasticsearchSecurityException.class, runnable); assertThat(ee.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackField.SECURITY)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java index 0bc7c527df346..dd7dda48ae813 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java @@ -7,11 +7,17 @@ import org.elasticsearch.Version; import org.elasticsearch.action.support.DestructiveOperations; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.Transport.Connection; @@ -31,6 +37,7 @@ import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; +import org.junit.After; import java.util.Collections; import java.util.concurrent.atomic.AtomicBoolean; @@ -54,25 +61,33 @@ public class SecurityServerTransportInterceptorTests extends ESTestCase { private ThreadContext threadContext; private XPackLicenseState xPackLicenseState; private SecurityContext securityContext; + private ClusterService clusterService; @Override public void setUp() throws Exception { super.setUp(); settings = Settings.builder().put("path.home", createTempDir()).build(); - threadPool = mock(ThreadPool.class); - threadContext = new ThreadContext(settings); - when(threadPool.getThreadContext()).thenReturn(threadContext); + threadPool = new TestThreadPool(getTestName()); + clusterService = ClusterServiceUtils.createClusterService(threadPool); + threadContext = threadPool.getThreadContext(); securityContext = spy(new SecurityContext(settings, threadPool.getThreadContext())); xPackLicenseState = mock(XPackLicenseState.class); when(xPackLicenseState.isAuthAllowed()).thenReturn(true); when(xPackLicenseState.isSecurityEnabled()).thenReturn(true); } + @After + public void stopThreadPool() throws Exception { + clusterService.close(); + terminate(threadPool); + } + public void testSendAsyncUnlicensed() { SecurityServerTransportInterceptor interceptor = 
new SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService); + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener when(xPackLicenseState.isAuthAllowed()).thenReturn(false); AtomicBoolean calledWrappedSender = new AtomicBoolean(false); AsyncSender sender = interceptor.interceptSender(new AsyncSender() { @@ -92,6 +107,46 @@ public void sendRequest(Transport.Connection conne verifyZeroInteractions(securityContext); } + public void testSendAsyncWithStateNotRecovered() { + SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, + mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), + securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService); + final boolean securityEnabled = randomBoolean(); + final boolean authAllowed = securityEnabled && randomBoolean(); + when(xPackLicenseState.isAuthAllowed()).thenReturn(authAllowed); + when(xPackLicenseState.isSecurityEnabled()).thenReturn(securityEnabled); + ClusterState notRecovered = ClusterState.builder(clusterService.state()) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK).build()) + .build(); + ClusterServiceUtils.setState(clusterService, notRecovered); + assertTrue(clusterService.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)); + + AtomicBoolean calledWrappedSender = new AtomicBoolean(false); + AtomicReference sendingUser = new AtomicReference<>(); + AsyncSender sender = interceptor.interceptSender(new AsyncSender() { + @Override + public void sendRequest(Transport.Connection connection, String action, TransportRequest request, + TransportRequestOptions options, TransportResponseHandler handler) { + if (calledWrappedSender.compareAndSet(false, true) == false) { + fail("sender called more than once!"); + } + sendingUser.set(securityContext.getUser()); + } + }); + Connection connection = mock(Connection.class); + when(connection.getVersion()).thenReturn(Version.CURRENT); + sender.sendRequest(connection, "internal:foo", null, null, null); + assertTrue(calledWrappedSender.get()); + assertEquals(SystemUser.INSTANCE, sendingUser.get()); + verify(xPackLicenseState).isSecurityEnabled(); + if (securityEnabled) { + verify(xPackLicenseState).isAuthAllowed(); + } + verify(securityContext).executeAsUser(any(User.class), any(Consumer.class), eq(Version.CURRENT)); + verifyNoMoreInteractions(xPackLicenseState); + } + public void testSendAsync() throws Exception { final User authUser = randomBoolean() ? 
new User("authenticator") : null; final User user = new User("test", randomRoles(), authUser); @@ -100,7 +155,8 @@ public void testSendAsync() throws Exception { SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService); + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener AtomicBoolean calledWrappedSender = new AtomicBoolean(false); AtomicReference sendingUser = new AtomicReference<>(); @@ -136,7 +192,8 @@ public void testSendAsyncSwitchToSystem() throws Exception { SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService); + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener AtomicBoolean calledWrappedSender = new AtomicBoolean(false); AtomicReference sendingUser = new AtomicReference<>(); @@ -167,11 +224,12 @@ public void testSendWithoutUser() throws Exception { SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))) { + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService) { @Override void assertNoAuthentication(String action) { } }; + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener assertNull(securityContext.getUser()); AsyncSender sender = interceptor.interceptSender(new AsyncSender() { @@ -203,7 +261,8 @@ public void testSendToNewerVersionSetsCorrectVersion() throws Exception { SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService); + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener AtomicBoolean calledWrappedSender = new AtomicBoolean(false); AtomicReference sendingUser = new AtomicReference<>(); @@ -243,7 +302,8 @@ public void testSendToOlderVersionSetsCorrectVersion() throws Exception { SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(settings, threadPool, mock(AuthenticationService.class), 
mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); + Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))), clusterService); + ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener AtomicBoolean calledWrappedSender = new AtomicBoolean(false); AtomicReference sendingUser = new AtomicReference<>(); From b44e1c197871435f45bbacf2c64768fd3c4c21cf Mon Sep 17 00:00:00 2001 From: Sue Gallagher <36747279+Sue-Gallagher@users.noreply.github.com> Date: Tue, 19 Jun 2018 13:48:13 -0700 Subject: [PATCH 38/92] [DOCS] Removed and params from MLT. Closes #28128 (#31370) --- docs/reference/query-dsl/mlt-query.asciidoc | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc index bd66c7f071cd4..19035d96ae04d 100644 --- a/docs/reference/query-dsl/mlt-query.asciidoc +++ b/docs/reference/query-dsl/mlt-query.asciidoc @@ -184,12 +184,6 @@ is the same as `like`. `fields`:: A list of fields to fetch and analyze the text from. -`like_text`:: -The text to find documents like it. - -`ids` or `docs`:: -A list of documents following the same syntax as the <>. - [float] [[mlt-query-term-selection]] ==== Term Selection Parameters From 5971eb83c434c1e52edbda2c038bf28740bab6d2 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 19 Jun 2018 13:57:10 -0700 Subject: [PATCH 39/92] [DOCS] Fixes code snippet testing for machine learning (#31189) --- x-pack/docs/build.gradle | 8 -- x-pack/docs/en/ml/aggregations.asciidoc | 3 +- x-pack/docs/en/ml/api-quickref.asciidoc | 4 +- x-pack/docs/en/ml/categories.asciidoc | 5 +- x-pack/docs/en/ml/configuring.asciidoc | 3 +- x-pack/docs/en/ml/customurl.asciidoc | 2 +- x-pack/docs/en/ml/functions.asciidoc | 3 +- x-pack/docs/en/ml/functions/count.asciidoc | 119 ++++++++++++++++---- x-pack/docs/en/ml/functions/geo.asciidoc | 30 ++++- x-pack/docs/en/ml/functions/info.asciidoc | 3 + x-pack/docs/en/ml/functions/metric.asciidoc | 20 +++- x-pack/docs/en/ml/functions/rare.asciidoc | 6 +- x-pack/docs/en/ml/functions/sum.asciidoc | 18 ++- x-pack/docs/en/ml/functions/time.asciidoc | 5 +- x-pack/docs/en/ml/populations.asciidoc | 3 +- x-pack/docs/en/ml/stopping-ml.asciidoc | 11 +- x-pack/docs/en/ml/transforms.asciidoc | 10 +- 17 files changed, 179 insertions(+), 74 deletions(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 0d1def2b4f5e5..e97faf12a6cd0 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -9,13 +9,6 @@ apply plugin: 'elasticsearch.docs-test' * only remove entries from this list. When it is empty we'll remove it * entirely and have a party! There will be cake and everything.... 
*/ buildRestTests.expectedUnconvertedCandidates = [ - 'en/ml/functions/count.asciidoc', - 'en/ml/functions/geo.asciidoc', - 'en/ml/functions/info.asciidoc', - 'en/ml/functions/metric.asciidoc', - 'en/ml/functions/rare.asciidoc', - 'en/ml/functions/sum.asciidoc', - 'en/ml/functions/time.asciidoc', 'en/rest-api/watcher/put-watch.asciidoc', 'en/security/authentication/user-cache.asciidoc', 'en/security/authorization/field-and-document-access-control.asciidoc', @@ -56,7 +49,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/watcher/troubleshooting.asciidoc', 'en/rest-api/license/delete-license.asciidoc', 'en/rest-api/license/update-license.asciidoc', - 'en/ml/api-quickref.asciidoc', 'en/rest-api/ml/delete-snapshot.asciidoc', 'en/rest-api/ml/forecast.asciidoc', 'en/rest-api/ml/get-bucket.asciidoc', diff --git a/x-pack/docs/en/ml/aggregations.asciidoc b/x-pack/docs/en/ml/aggregations.asciidoc index f3b8e6b3e34d6..5ff54b76f01b3 100644 --- a/x-pack/docs/en/ml/aggregations.asciidoc +++ b/x-pack/docs/en/ml/aggregations.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-configuring-aggregation]] -=== Aggregating Data For Faster Performance +=== Aggregating data for faster performance By default, {dfeeds} fetch data from {es} using search and scroll requests. It can be significantly more efficient, however, to aggregate data in {es} diff --git a/x-pack/docs/en/ml/api-quickref.asciidoc b/x-pack/docs/en/ml/api-quickref.asciidoc index 9602379c37416..dc87a6ba209c2 100644 --- a/x-pack/docs/en/ml/api-quickref.asciidoc +++ b/x-pack/docs/en/ml/api-quickref.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-api-quickref]] -== API Quick Reference +== API quick reference All {ml} endpoints have the following base: @@ -7,6 +8,7 @@ All {ml} endpoints have the following base: ---- /_xpack/ml/ ---- +// NOTCONSOLE The main {ml} resources can be accessed with a variety of endpoints: diff --git a/x-pack/docs/en/ml/categories.asciidoc b/x-pack/docs/en/ml/categories.asciidoc index bb217e2e18654..21f71b871cbb9 100644 --- a/x-pack/docs/en/ml/categories.asciidoc +++ b/x-pack/docs/en/ml/categories.asciidoc @@ -1,3 +1,4 @@ +[role="xpack"] [[ml-configuring-categories]] === Categorizing log messages @@ -77,7 +78,7 @@ NOTE: To add the `categorization_examples_limit` property, you must use the [float] [[ml-configuring-analyzer]] -==== Customizing the Categorization Analyzer +==== Customizing the categorization analyzer Categorization uses English dictionary words to identify log message categories. By default, it also uses English tokenization rules. For this reason, if you use @@ -213,7 +214,7 @@ API examples above. [float] [[ml-viewing-categories]] -==== Viewing Categorization Results +==== Viewing categorization results After you open the job and start the {dfeed} or supply data to the job, you can view the categorization results in {kib}. For example: diff --git a/x-pack/docs/en/ml/configuring.asciidoc b/x-pack/docs/en/ml/configuring.asciidoc index ba965a08b0462..c2c6e69a71128 100644 --- a/x-pack/docs/en/ml/configuring.asciidoc +++ b/x-pack/docs/en/ml/configuring.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-configuring]] -== Configuring Machine Learning +== Configuring machine learning If you want to use {xpackml} features, there must be at least one {ml} node in your cluster and all master-eligible nodes must have {ml} enabled. 
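As a point of reference for the `/_xpack/ml/` base path that api-quickref.asciidoc now marks `// NOTCONSOLE`, an ordinary job-management call against that base looks like the following; this snippet is illustrative only and assumes no particular job exists:

[source,js]
--------------------------------------------------
GET _xpack/ml/anomaly_detectors
--------------------------------------------------
// NOTCONSOLE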
By default, diff --git a/x-pack/docs/en/ml/customurl.asciidoc b/x-pack/docs/en/ml/customurl.asciidoc index 7c773c4b9bf49..7c197084c0e5f 100644 --- a/x-pack/docs/en/ml/customurl.asciidoc +++ b/x-pack/docs/en/ml/customurl.asciidoc @@ -48,7 +48,7 @@ using the {ml} APIs. [float] [[ml-configuring-url-strings]] -==== String Substitution in Custom URLs +==== String substitution in custom URLs You can use dollar sign ($) delimited tokens in a custom URL. These tokens are substituted for the values of the corresponding fields in the anomaly records. diff --git a/x-pack/docs/en/ml/functions.asciidoc b/x-pack/docs/en/ml/functions.asciidoc index ae5f768e05697..e32470c6827b6 100644 --- a/x-pack/docs/en/ml/functions.asciidoc +++ b/x-pack/docs/en/ml/functions.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-functions]] -== Function Reference +== Function reference The {xpackml} features include analysis functions that provide a wide variety of flexible ways to analyze data for anomalies. diff --git a/x-pack/docs/en/ml/functions/count.asciidoc b/x-pack/docs/en/ml/functions/count.asciidoc index 4b70f80933dca..a2dc5645b61ae 100644 --- a/x-pack/docs/en/ml/functions/count.asciidoc +++ b/x-pack/docs/en/ml/functions/count.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-count-functions]] -=== Count Functions +=== Count functions Count functions detect anomalies when the number of events in a bucket is anomalous. @@ -21,7 +22,7 @@ The {xpackml} features include the following count functions: [float] [[ml-count]] -===== Count, High_count, Low_count +===== Count, high_count, low_count The `count` function detects anomalies when the number of events in a bucket is anomalous. @@ -44,8 +45,20 @@ see {ref}/ml-job-resource.html#ml-detectorconfig[Detector Configuration Objects] .Example 1: Analyzing events with the count function [source,js] -------------------------------------------------- -{ "function" : "count" } +PUT _xpack/ml/anomaly_detectors/example1 +{ + "analysis_config": { + "detectors": [{ + "function" : "count" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } +} -------------------------------------------------- +// CONSOLE This example is probably the simplest possible analysis. It identifies time buckets during which the overall count of events is higher or lower than @@ -57,12 +70,22 @@ and detects when the event rate is unusual compared to its past behavior. .Example 2: Analyzing errors with the high_count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example2 { - "function" : "high_count", - "by_field_name" : "error_code", - "over_field_name": "user" + "analysis_config": { + "detectors": [{ + "function" : "high_count", + "by_field_name" : "error_code", + "over_field_name": "user" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE If you use this `high_count` function in a detector in your job, it models the event rate for each error code. It detects users that generate an @@ -72,11 +95,21 @@ unusually high count of error codes compared to other users. 
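The converted `example1` and `example2` snippets above only create the jobs. As an illustrative follow-up (the job id, timestamps, and field values here are assumptions), such a job would typically be opened and then fed newline-delimited JSON documents whose fields match the detector configuration:

[source,js]
--------------------------------------------------
POST _xpack/ml/anomaly_detectors/example2/_open

POST _xpack/ml/anomaly_detectors/example2/_data
{"timestamp": 1528934400000, "error_code": "404", "user": "alice"}
{"timestamp": 1528934460000, "error_code": "503", "user": "bob"}
--------------------------------------------------
// NOTCONSOLE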
.Example 3: Analyzing status codes with the low_count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example3 { - "function" : "low_count", - "by_field_name" : "status_code" + "analysis_config": { + "detectors": [{ + "function" : "low_count", + "by_field_name" : "status_code" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE In this example, the function detects when the count of events for a status code is lower than usual. @@ -88,22 +121,30 @@ compared to its past behavior. .Example 4: Analyzing aggregated data with the count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example4 { - "summary_count_field_name" : "events_per_min", - "detectors" [ - { "function" : "count" } - ] -} + "analysis_config": { + "summary_count_field_name" : "events_per_min", + "detectors": [{ + "function" : "count" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } +} -------------------------------------------------- +// CONSOLE If you are analyzing an aggregated `events_per_min` field, do not use a sum function (for example, `sum(events_per_min)`). Instead, use the count function -and the `summary_count_field_name` property. -//TO-DO: For more information, see <>. +and the `summary_count_field_name` property. For more information, see +<>. [float] [[ml-nonzero-count]] -===== Non_zero_count, High_non_zero_count, Low_non_zero_count +===== Non_zero_count, high_non_zero_count, low_non_zero_count The `non_zero_count` function detects anomalies when the number of events in a bucket is anomalous, but it ignores cases where the bucket count is zero. Use @@ -144,11 +185,21 @@ The `non_zero_count` function models only the following data: .Example 5: Analyzing signatures with the high_non_zero_count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example5 { - "function" : "high_non_zero_count", - "by_field_name" : "signaturename" + "analysis_config": { + "detectors": [{ + "function" : "high_non_zero_count", + "by_field_name" : "signaturename" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE If you use this `high_non_zero_count` function in a detector in your job, it models the count of events for the `signaturename` field. It ignores any buckets @@ -163,7 +214,7 @@ data is sparse, use the `count` functions, which are optimized for that scenario [float] [[ml-distinct-count]] -===== Distinct_count, High_distinct_count, Low_distinct_count +===== Distinct_count, high_distinct_count, low_distinct_count The `distinct_count` function detects anomalies where the number of distinct values in one field is unusual. 
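Stepping back to Example 4 above: that example covers data summarized before indexing, while the closely related aggregated-{dfeed} case referenced via <<ml-configuring-aggregation>> conventionally points `summary_count_field_name` at `doc_count`. The following is a minimal sketch of that pairing; the job id, index name, field names, and intervals are all assumptions:

[source,js]
--------------------------------------------------
PUT _xpack/ml/anomaly_detectors/example-agg-count
{
  "analysis_config": {
    "bucket_span": "30m",
    "summary_count_field_name": "doc_count",
    "detectors": [{
      "function" : "count"
    }]
  },
  "data_description": {
    "time_field":"timestamp",
    "time_format": "epoch_ms"
  }
}

PUT _xpack/ml/datafeeds/datafeed-example-agg-count
{
  "job_id": "example-agg-count",
  "indices": ["my-index"],
  "aggregations": {
    "buckets": {
      "date_histogram": { "field": "timestamp", "interval": "300s" },
      "aggregations": {
        "timestamp": { "max": { "field": "timestamp" } }
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE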
@@ -187,11 +238,21 @@ see {ref}/ml-job-resource.html#ml-detectorconfig[Detector Configuration Objects] .Example 6: Analyzing users with the distinct_count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example6 { - "function" : "distinct_count", - "field_name" : "user" + "analysis_config": { + "detectors": [{ + "function" : "distinct_count", + "field_name" : "user" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE This `distinct_count` function detects when a system has an unusual number of logged in users. When you use this function in a detector in your job, it @@ -201,12 +262,22 @@ users is unusual compared to the past. .Example 7: Analyzing ports with the high_distinct_count function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example7 { - "function" : "high_distinct_count", - "field_name" : "dst_port", - "over_field_name": "src_ip" + "analysis_config": { + "detectors": [{ + "function" : "high_distinct_count", + "field_name" : "dst_port", + "over_field_name": "src_ip" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE This example detects instances of port scanning. When you use this function in a detector in your job, it models the distinct count of ports. It also detects the diff --git a/x-pack/docs/en/ml/functions/geo.asciidoc b/x-pack/docs/en/ml/functions/geo.asciidoc index cc98e95bf2069..e9685b46e1677 100644 --- a/x-pack/docs/en/ml/functions/geo.asciidoc +++ b/x-pack/docs/en/ml/functions/geo.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-geo-functions]] -=== Geographic Functions +=== Geographic functions The geographic functions detect anomalies in the geographic location of the input data. @@ -28,12 +29,22 @@ see {ref}/ml-job-resource.html#ml-detectorconfig[Detector Configuration Objects] .Example 1: Analyzing transactions with the lat_long function [source,js] -------------------------------------------------- +PUT _xpack/ml/anomaly_detectors/example1 { - "function" : "lat_long", - "field_name" : "transactionCoordinates", - "by_field_name" : "creditCardNumber" + "analysis_config": { + "detectors": [{ + "function" : "lat_long", + "field_name" : "transactionCoordinates", + "by_field_name" : "creditCardNumber" + }] + }, + "data_description": { + "time_field":"timestamp", + "time_format": "epoch_ms" + } } -------------------------------------------------- +// CONSOLE If you use this `lat_long` function in a detector in your job, it detects anomalies where the geographic location of a credit card transaction is @@ -54,6 +65,7 @@ For example, JSON data might contain the following transaction coordinates: "creditCardNumber": "1234123412341234" } -------------------------------------------------- +// NOTCONSOLE In {es}, location data is likely to be stored in `geo_point` fields. For more information, see {ref}/geo-point.html[Geo-point datatype]. This data type is not @@ -64,7 +76,15 @@ format. For example, the following Painless script transforms [source,js] -------------------------------------------------- +PUT _xpack/ml/datafeeds/datafeed-test2 { + "job_id": "farequote", + "indices": ["farequote"], + "query": { + "match_all": { + "boost": 1 + } + }, "script_fields": { "lat-lon": { "script": { @@ -75,5 +95,7 @@ format. 
For example, the following Painless script transforms } } -------------------------------------------------- +// CONSOLE +// TEST[setup:farequote_job] For more information, see <>. diff --git a/x-pack/docs/en/ml/functions/info.asciidoc b/x-pack/docs/en/ml/functions/info.asciidoc index f964d4eb3ec67..2c3117e0e5644 100644 --- a/x-pack/docs/en/ml/functions/info.asciidoc +++ b/x-pack/docs/en/ml/functions/info.asciidoc @@ -40,6 +40,7 @@ For more information about those properties, see "over_field_name" : "highest_registered_domain" } -------------------------------------------------- +// NOTCONSOLE If you use this `info_content` function in a detector in your job, it models information that is present in the `subdomain` string. It detects anomalies @@ -60,6 +61,7 @@ choice. "over_field_name" : "src_ip" } -------------------------------------------------- +// NOTCONSOLE If you use this `high_info_content` function in a detector in your job, it models information content that is held in the DNS query string. It detects @@ -77,6 +79,7 @@ information content is higher than expected. "by_field_name" : "logfilename" } -------------------------------------------------- +// NOTCONSOLE If you use this `low_info_content` function in a detector in your job, it models information content that is present in the message string for each diff --git a/x-pack/docs/en/ml/functions/metric.asciidoc b/x-pack/docs/en/ml/functions/metric.asciidoc index 495fc6f333575..3ee5179702720 100644 --- a/x-pack/docs/en/ml/functions/metric.asciidoc +++ b/x-pack/docs/en/ml/functions/metric.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-metric-functions]] -=== Metric Functions +=== Metric functions The metric functions include functions such as mean, min and max. These values are calculated for each bucket. Field values that cannot be converted to @@ -42,6 +43,7 @@ For more information about those properties, see "by_field_name" : "product" } -------------------------------------------------- +// NOTCONSOLE If you use this `min` function in a detector in your job, it detects where the smallest transaction is lower than previously observed. You can use this @@ -76,6 +78,7 @@ For more information about those properties, see "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `max` function in a detector in your job, it detects where the longest `responsetime` is longer than previously observed. You can use this @@ -98,6 +101,7 @@ to previous applications. "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE The analysis in the previous example can be performed alongside `high_mean` functions by application. By combining detectors and using the same influencer @@ -106,7 +110,7 @@ response times for each bucket. [float] [[ml-metric-median]] -==== Median, High_median, Low_median +==== Median, high_median, low_median The `median` function detects anomalies in the statistical median of a value. The median value is calculated for each bucket. @@ -136,6 +140,7 @@ For more information about those properties, see "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `median` function in a detector in your job, it models the median `responsetime` for each application over time. It detects when the median @@ -143,7 +148,7 @@ median `responsetime` for each application over time. 
It detects when the median [float] [[ml-metric-mean]] -==== Mean, High_mean, Low_mean +==== Mean, high_mean, low_mean The `mean` function detects anomalies in the arithmetic mean of a value. The mean value is calculated for each bucket. @@ -173,6 +178,7 @@ For more information about those properties, see "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `mean` function in a detector in your job, it models the mean `responsetime` for each application over time. It detects when the mean @@ -187,6 +193,7 @@ If you use this `mean` function in a detector in your job, it models the mean "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `high_mean` function in a detector in your job, it models the mean `responsetime` for each application over time. It detects when the mean @@ -201,6 +208,7 @@ mean `responsetime` for each application over time. It detects when the mean "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `low_mean` function in a detector in your job, it models the mean `responsetime` for each application over time. It detects when the mean @@ -237,6 +245,7 @@ For more information about those properties, see "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `metric` function in a detector in your job, it models the mean, min, and max `responsetime` for each application over time. It detects @@ -245,7 +254,7 @@ when the mean, min, or max `responsetime` is unusual compared to previous [float] [[ml-metric-varp]] -==== Varp, High_varp, Low_varp +==== Varp, high_varp, low_varp The `varp` function detects anomalies in the variance of a value which is a measure of the variability and spread in the data. @@ -273,6 +282,7 @@ For more information about those properties, see "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `varp` function in a detector in your job, it models the variance in values of `responsetime` for each application over time. It detects @@ -288,6 +298,7 @@ behavior. "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `high_varp` function in a detector in your job, it models the variance in values of `responsetime` for each application over time. It detects @@ -303,6 +314,7 @@ behavior. "by_field_name" : "application" } -------------------------------------------------- +// NOTCONSOLE If you use this `low_varp` function in a detector in your job, it models the variance in values of `responsetime` for each application over time. It detects diff --git a/x-pack/docs/en/ml/functions/rare.asciidoc b/x-pack/docs/en/ml/functions/rare.asciidoc index 2485605557cfa..fc30918b508f1 100644 --- a/x-pack/docs/en/ml/functions/rare.asciidoc +++ b/x-pack/docs/en/ml/functions/rare.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-rare-functions]] -=== Rare Functions +=== Rare functions The rare functions detect values that occur rarely in time or rarely for a population. @@ -54,6 +55,7 @@ For more information about those properties, see "by_field_name" : "status" } -------------------------------------------------- +// NOTCONSOLE If you use this `rare` function in a detector in your job, it detects values that are rare in time. 
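The `rare` snippet above stays a `// NOTCONSOLE` fragment in this change. Purely for comparison with the converted count examples, an illustrative full-request form (the job id is an assumption, the detector matches the fragment) would be:

[source,js]
--------------------------------------------------
PUT _xpack/ml/anomaly_detectors/example-rare
{
  "analysis_config": {
    "detectors": [{
      "function" : "rare",
      "by_field_name" : "status"
    }]
  },
  "data_description": {
    "time_field":"timestamp",
    "time_format": "epoch_ms"
  }
}
--------------------------------------------------
// NOTCONSOLE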
It models status codes that occur over time and detects @@ -69,6 +71,7 @@ status codes in a web access log that have never (or rarely) occurred before. "over_field_name" : "clientip" } -------------------------------------------------- +// NOTCONSOLE If you use this `rare` function in a detector in your job, it detects values that are rare in a population. It models status code and client IP interactions @@ -111,6 +114,7 @@ For more information about those properties, see "over_field_name" : "clientip" } -------------------------------------------------- +// NOTCONSOLE If you use this `freq_rare` function in a detector in your job, it detects values that are frequently rare in a population. It models URI paths and diff --git a/x-pack/docs/en/ml/functions/sum.asciidoc b/x-pack/docs/en/ml/functions/sum.asciidoc index 3a0f0b264e9ef..7a95ad63fccee 100644 --- a/x-pack/docs/en/ml/functions/sum.asciidoc +++ b/x-pack/docs/en/ml/functions/sum.asciidoc @@ -1,6 +1,6 @@ - +[role="xpack"] [[ml-sum-functions]] -=== Sum Functions +=== Sum functions The sum functions detect anomalies when the sum of a field in a bucket is anomalous. @@ -16,16 +16,9 @@ The {xpackml} features include the following sum functions: * xref:ml-sum[`sum`, `high_sum`, `low_sum`] * xref:ml-nonnull-sum[`non_null_sum`, `high_non_null_sum`, `low_non_null_sum`] -//// -TBD: Incorporate from prelert docs?: -Input data may contain pre-calculated fields giving the total count of some value e.g. transactions per minute. -Ensure you are familiar with our advice on Summarization of Input Data, as this is likely to provide -a more appropriate method to using the sum function. -//// - [float] [[ml-sum]] -==== Sum, High_sum, Low_sum +==== Sum, high_sum, low_sum The `sum` function detects anomalies where the sum of a field in a bucket is anomalous. @@ -54,6 +47,7 @@ For more information about those properties, see "over_field_name" : "employee" } -------------------------------------------------- +// NOTCONSOLE If you use this `sum` function in a detector in your job, it models total expenses per employees for each cost center. For each time bucket, @@ -69,6 +63,7 @@ to other employees. "over_field_name" : "cs_host" } -------------------------------------------------- +// NOTCONSOLE If you use this `high_sum` function in a detector in your job, it models total `cs_bytes`. It detects `cs_hosts` that transfer unusually high @@ -79,7 +74,7 @@ to find users that are abusing internet privileges. [float] [[ml-nonnull-sum]] -==== Non_null_sum, High_non_null_sum, Low_non_null_sum +==== Non_null_sum, high_non_null_sum, low_non_null_sum The `non_null_sum` function is useful if your data is sparse. Buckets without values are ignored and buckets with a zero value are analyzed. @@ -110,6 +105,7 @@ is not applicable for this function. "byFieldName" : "employee" } -------------------------------------------------- +// NOTCONSOLE If you use this `high_non_null_sum` function in a detector in your job, it models the total `amount_approved` for each employee. It ignores any buckets diff --git a/x-pack/docs/en/ml/functions/time.asciidoc b/x-pack/docs/en/ml/functions/time.asciidoc index a8067e2ca1342..ac8199307f130 100644 --- a/x-pack/docs/en/ml/functions/time.asciidoc +++ b/x-pack/docs/en/ml/functions/time.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-time-functions]] -=== Time Functions +=== Time functions The time functions detect events that happen at unusual times, either of the day or of the week. 
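Once a job built around a detector such as the `high_sum` example above has run, its anomalies can be inspected through the results endpoints; a minimal illustrative query (the job id is an assumption):

[source,js]
--------------------------------------------------
GET _xpack/ml/anomaly_detectors/example-high-sum/results/records
{
  "sort": "record_score",
  "desc": true
}
--------------------------------------------------
// NOTCONSOLE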
These functions can be used to find unusual patterns of behavior, @@ -60,6 +61,7 @@ For more information about those properties, see "by_field_name" : "process" } -------------------------------------------------- +// NOTCONSOLE If you use this `time_of_day` function in a detector in your job, it models when events occur throughout a day for each process. It detects when an @@ -91,6 +93,7 @@ For more information about those properties, see "over_field_name" : "workstation" } -------------------------------------------------- +// NOTCONSOLE If you use this `time_of_week` function in a detector in your job, it models when events occur throughout the week for each `eventcode`. It detects diff --git a/x-pack/docs/en/ml/populations.asciidoc b/x-pack/docs/en/ml/populations.asciidoc index 53e10ce8d41b6..bf0dd2ad7d7bb 100644 --- a/x-pack/docs/en/ml/populations.asciidoc +++ b/x-pack/docs/en/ml/populations.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-configuring-pop]] -=== Performing Population Analysis +=== Performing population analysis Entities or events in your data can be considered anomalous when: diff --git a/x-pack/docs/en/ml/stopping-ml.asciidoc b/x-pack/docs/en/ml/stopping-ml.asciidoc index 862fe5cf05061..c0be2d947cdc7 100644 --- a/x-pack/docs/en/ml/stopping-ml.asciidoc +++ b/x-pack/docs/en/ml/stopping-ml.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[stopping-ml]] -== Stopping Machine Learning +== Stopping machine learning An orderly shutdown of {ml} ensures that: @@ -24,10 +25,10 @@ request stops the `feed1` {dfeed}: [source,js] -------------------------------------------------- -POST _xpack/ml/datafeeds/feed1/_stop +POST _xpack/ml/datafeeds/datafeed-total-requests/_stop -------------------------------------------------- // CONSOLE -// TEST[skip:todo] +// TEST[setup:server_metrics_startdf] NOTE: You must have `manage_ml`, or `manage` cluster privileges to stop {dfeeds}. For more information, see <>. @@ -63,10 +64,10 @@ example, the following request closes the `job1` job: [source,js] -------------------------------------------------- -POST _xpack/ml/anomaly_detectors/job1/_close +POST _xpack/ml/anomaly_detectors/total-requests/_close -------------------------------------------------- // CONSOLE -// TEST[skip:todo] +// TEST[setup:server_metrics_openjob] NOTE: You must have `manage_ml`, or `manage` cluster privileges to stop {dfeeds}. For more information, see <>. diff --git a/x-pack/docs/en/ml/transforms.asciidoc b/x-pack/docs/en/ml/transforms.asciidoc index 9789518081be2..c4b4d56029748 100644 --- a/x-pack/docs/en/ml/transforms.asciidoc +++ b/x-pack/docs/en/ml/transforms.asciidoc @@ -1,5 +1,6 @@ +[role="xpack"] [[ml-configuring-transform]] -=== Transforming Data With Script Fields +=== Transforming data with script fields If you use {dfeeds}, you can add scripts to transform your data before it is analyzed. 
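As a quick illustration of the optional `script_fields` property that transforms.asciidoc goes on to describe, a {dfeed} can derive a new field from existing ones before the job sees the data. This sketch is illustrative only; it assumes `error_count` and `aborted_count` fields exist in the source index, and the job id and index name are placeholders:

[source,js]
--------------------------------------------------
PUT _xpack/ml/datafeeds/datafeed-test1
{
  "job_id": "test1",
  "indices": ["my-index"],
  "script_fields": {
    "total_error_count": {
      "script": {
        "lang": "painless",
        "source": "doc['error_count'].value + doc['aborted_count'].value"
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE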
{dfeeds-cap} contain an optional `script_fields` property, where @@ -602,10 +603,3 @@ The preview {dfeed} API returns the following results, which show that ] ---------------------------------- // TESTRESPONSE - -//// -==== Configuring Script Fields in {dfeeds-cap} - -//TO-DO: Add Kibana steps from -//https://github.com/elastic/prelert-legacy/wiki/Transforming-data-with-script_fields#transforming-geo_point-data-to-a-workable-string-format -//// From 401800d95840a91fb0c8896251d2b6e7972a707c Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Jun 2018 17:06:09 -0700 Subject: [PATCH 40/92] Core: Remove index name resolver from base TransportAction (#31002) Most transport actions don't need to resolve index names. This commit removes the index name resolver as a super constructor parameter for TransportAction. The actions that do need the resolver then have a member added to keep the resolver from their own constructor. --- .../action/bulk/TransportNoopBulkAction.java | 5 ++--- .../search/TransportNoopSearchAction.java | 10 +++++----- .../ingest/common/GrokProcessorGetAction.java | 6 ++---- .../TransportMultiSearchTemplateAction.java | 9 +++------ .../TransportSearchTemplateAction.java | 7 ++++--- .../painless/PainlessExecuteAction.java | 6 ++---- .../rankeval/TransportRankEvalAction.java | 12 ++++++------ .../reindex/TransportDeleteByQueryAction.java | 11 ++++++----- .../index/reindex/TransportReindexAction.java | 6 ++++-- .../reindex/TransportRethrottleAction.java | 8 +++----- .../reindex/TransportUpdateByQueryAction.java | 9 ++++----- .../TransportNodesHotThreadsAction.java | 8 +++----- .../node/info/TransportNodesInfoAction.java | 9 +++------ ...ansportNodesReloadSecureSettingsAction.java | 7 +++---- .../node/stats/TransportNodesStatsAction.java | 6 ++---- .../cancel/TransportCancelTasksAction.java | 7 ++----- .../node/tasks/get/TransportGetTaskAction.java | 6 ++---- .../tasks/list/TransportListTasksAction.java | 7 +++---- .../node/usage/TransportNodesUsageAction.java | 8 +++----- .../remote/TransportRemoteInfoAction.java | 10 +++++----- .../status/TransportNodesSnapshotsStatus.java | 12 +++++------- .../stats/TransportClusterStatsAction.java | 11 ++++------- .../flush/TransportSyncedFlushAction.java | 12 ++++++------ .../get/TransportGetFieldMappingsAction.java | 4 +++- .../action/bulk/TransportBulkAction.java | 4 +++- .../TransportFieldCapabilitiesAction.java | 9 ++++----- .../action/get/TransportMultiGetAction.java | 5 +++-- .../SimulatePipelineTransportAction.java | 8 +++++--- .../action/main/TransportMainAction.java | 6 ++---- .../search/TransportClearScrollAction.java | 4 +--- .../search/TransportMultiSearchAction.java | 10 ++++------ .../action/search/TransportSearchAction.java | 6 +++++- .../search/TransportSearchScrollAction.java | 7 +++---- .../action/support/HandledTransportAction.java | 18 ++++++++---------- .../action/support/TransportAction.java | 5 +---- .../broadcast/TransportBroadcastAction.java | 4 +++- .../node/TransportBroadcastByNodeAction.java | 4 +++- .../master/TransportMasterNodeAction.java | 9 ++++++--- .../support/nodes/TransportNodesAction.java | 7 ++----- .../TransportBroadcastReplicationAction.java | 4 +++- .../TransportReplicationAction.java | 4 +++- ...TransportInstanceSingleOperationAction.java | 4 +++- .../shard/TransportSingleShardAction.java | 5 +++-- .../support/tasks/TransportTasksAction.java | 11 ++++------- .../TransportMultiTermVectorsAction.java | 5 +++-- .../TransportNodesListGatewayMetaState.java | 9 +++------ 
...TransportNodesListGatewayStartedShards.java | 10 +++------- .../TransportNodesListShardStoreMetaData.java | 6 ++---- .../action/ActionModuleTests.java | 2 +- .../node/tasks/TaskManagerTestCase.java | 11 ++++------- .../cluster/node/tasks/TestTaskPlugin.java | 7 +++---- .../node/tasks/TransportTasksActionTests.java | 3 +-- .../action/main/MainActionTests.java | 3 +-- .../search/MultiSearchActionTookTests.java | 6 +++--- .../TransportMultiSearchActionTests.java | 4 ++-- .../TransportActionFilterChainTests.java | 4 ++-- .../nodes/TransportNodesActionTests.java | 2 +- .../client/node/NodeClientHeadersTests.java | 2 +- .../persistent/TestPersistentTasksPlugin.java | 4 ++-- .../core/action/TransportXPackInfoAction.java | 8 +++----- .../TransportGetCertificateInfoAction.java | 4 +--- .../action/TransportXPackInfoActionTests.java | 3 +-- .../action/TransportGraphExploreAction.java | 8 ++++---- .../ml/action/TransportCloseJobAction.java | 10 ++++------ .../action/TransportDeleteCalendarAction.java | 11 +++++------ .../TransportDeleteCalendarEventAction.java | 4 +--- .../TransportDeleteExpiredDataAction.java | 7 ++----- .../ml/action/TransportDeleteFilterAction.java | 10 ++++------ .../TransportDeleteModelSnapshotAction.java | 7 +++---- .../ml/action/TransportFlushJobAction.java | 6 ++---- .../ml/action/TransportForecastJobAction.java | 6 ++---- .../ml/action/TransportGetBucketsAction.java | 10 +++++----- .../TransportGetCalendarEventsAction.java | 10 ++++------ .../ml/action/TransportGetCalendarsAction.java | 9 +++------ .../action/TransportGetCategoriesAction.java | 10 +++++----- .../ml/action/TransportGetFiltersAction.java | 9 +++------ .../action/TransportGetInfluencersAction.java | 10 +++++----- .../ml/action/TransportGetJobsStatsAction.java | 4 +--- .../TransportGetModelSnapshotsAction.java | 8 +++----- .../TransportGetOverallBucketsAction.java | 10 +++++----- .../ml/action/TransportGetRecordsAction.java | 10 +++++----- .../action/TransportIsolateDatafeedAction.java | 6 ++---- .../ml/action/TransportJobTaskAction.java | 7 +++---- .../ml/action/TransportKillProcessAction.java | 6 ++---- .../xpack/ml/action/TransportMlInfoAction.java | 9 ++++----- .../ml/action/TransportPersistJobAction.java | 9 +++------ .../TransportPostCalendarEventsAction.java | 9 +++------ .../ml/action/TransportPostDataAction.java | 8 +++----- .../action/TransportPreviewDatafeedAction.java | 9 ++++----- .../ml/action/TransportPutCalendarAction.java | 10 ++++------ .../ml/action/TransportPutFilterAction.java | 5 ++--- .../ml/action/TransportStopDatafeedAction.java | 8 +++----- .../TransportUpdateCalendarJobAction.java | 10 +++------- .../TransportUpdateModelSnapshotAction.java | 8 +++----- .../action/TransportUpdateProcessAction.java | 6 ++---- .../TransportValidateDetectorAction.java | 9 +++++---- .../TransportValidateJobConfigAction.java | 9 +++++---- .../action/TransportCloseJobActionTests.java | 3 +-- .../action/TransportMonitoringBulkAction.java | 7 ++----- .../TransportMonitoringBulkActionTests.java | 11 ++++------- .../action/TransportGetRollupCapsAction.java | 12 ++++-------- .../action/TransportGetRollupJobAction.java | 8 ++------ .../action/TransportPutRollupJobAction.java | 3 --- .../action/TransportRollupSearchAction.java | 6 ++---- .../action/TransportStartRollupAction.java | 7 ++----- .../action/TransportStopRollupAction.java | 7 ++----- .../realm/TransportClearRealmCacheAction.java | 6 ++---- .../role/TransportClearRolesCacheAction.java | 8 +++----- .../action/role/TransportDeleteRoleAction.java | 7 
++----- .../action/role/TransportGetRolesAction.java | 6 ++---- .../action/role/TransportPutRoleAction.java | 4 +--- .../TransportDeleteRoleMappingAction.java | 10 +++------- .../TransportGetRoleMappingsAction.java | 10 +++------- .../TransportPutRoleMappingAction.java | 10 +++------- .../saml/TransportSamlAuthenticateAction.java | 8 +++----- .../TransportSamlInvalidateSessionAction.java | 7 ++----- .../action/saml/TransportSamlLogoutAction.java | 7 ++----- ...ansportSamlPrepareAuthenticationAction.java | 8 +++----- .../token/TransportCreateTokenAction.java | 7 ++----- .../token/TransportInvalidateTokenAction.java | 6 ++---- .../token/TransportRefreshTokenAction.java | 7 ++----- .../user/TransportAuthenticateAction.java | 10 +++++----- .../user/TransportChangePasswordAction.java | 7 ++----- .../action/user/TransportDeleteUserAction.java | 10 +++++----- .../action/user/TransportGetUsersAction.java | 7 ++----- .../user/TransportHasPrivilegesAction.java | 7 ++----- .../action/user/TransportPutUserAction.java | 4 +--- .../action/user/TransportSetEnabledAction.java | 7 ++----- .../role/TransportDeleteRoleActionTests.java | 7 +++---- .../role/TransportGetRolesActionTests.java | 9 ++++----- .../role/TransportPutRoleActionTests.java | 7 +++---- .../TransportGetRoleMappingsActionTests.java | 6 ++---- .../TransportPutRoleMappingActionTests.java | 4 +--- ...nsportSamlInvalidateSessionActionTests.java | 3 +-- .../saml/TransportSamlLogoutActionTests.java | 4 +--- .../user/TransportAuthenticateActionTests.java | 7 +++---- .../TransportChangePasswordActionTests.java | 9 ++++----- .../user/TransportDeleteUserActionTests.java | 11 +++++------ .../user/TransportGetUsersActionTests.java | 13 ++++++------- .../TransportHasPrivilegesActionTests.java | 4 +--- .../user/TransportPutUserActionTests.java | 11 +++++------ .../user/TransportSetEnabledActionTests.java | 11 +++++------ .../plugin/TransportSqlClearCursorAction.java | 13 +++++-------- .../sql/plugin/TransportSqlQueryAction.java | 13 +++++-------- .../plugin/TransportSqlTranslateAction.java | 5 ++--- .../actions/WatcherTransportAction.java | 6 ++---- .../actions/ack/TransportAckWatchAction.java | 8 +++----- .../activate/TransportActivateWatchAction.java | 8 +++----- .../delete/TransportDeleteWatchAction.java | 10 +++++----- .../execute/TransportExecuteWatchAction.java | 10 ++++------ .../actions/get/TransportGetWatchAction.java | 7 ++----- .../actions/put/TransportPutWatchAction.java | 7 ++----- .../stats/TransportWatcherStatsAction.java | 9 +++------ .../ack/TransportAckWatchActionTests.java | 6 ++---- .../put/TransportPutWatchActionTests.java | 6 ++---- .../TransportWatcherStatsActionTests.java | 4 +--- 156 files changed, 466 insertions(+), 689 deletions(-) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java index 0ce008908cf7f..0c1065ad13145 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.update.UpdateResponse; -import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; @@ -40,8 +39,8 @@ public class TransportNoopBulkAction extends HandledTransportAction { @Inject - public TransportNoopSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters - actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, NoopSearchAction.NAME, threadPool, transportService, actionFilters, SearchRequest::new, - indexNameExpressionResolver); + public TransportNoopSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters) { + super(settings, NoopSearchAction.NAME, threadPool, transportService, actionFilters, + (Writeable.Reader) SearchRequest::new); } @Override diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 4b74bb800458d..77ad363b50680 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -116,9 +115,8 @@ public static class TransportAction extends HandledTransportAction { @@ -55,11 +55,12 @@ public class TransportSearchTemplateAction extends HandledTransportAction) SearchTemplateRequest::new); this.scriptService = scriptService; this.searchAction = searchAction; this.xContentRegistry = xContentRegistry; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 5430303feb262..ea0664b2aa446 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; @@ -282,9 +281,8 @@ public static class TransportAction extends HandledTransportAction) RankEvalRequest::new); this.scriptService = scriptService; this.namedXContentRegistry = namedXContentRegistry; this.client = client; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index e2de5cd4ffc55..35aa8d77d104e 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -19,13 +19,14 @@ package org.elasticsearch.index.reindex; +import java.util.function.Supplier; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -40,10 +41,10 @@ public class TransportDeleteByQueryAction extends HandledTransportAction) DeleteByQueryRequest::new); this.client = client; this.scriptService = scriptService; this.clusterService = clusterService; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 5e0ad0fd3fdd8..3db3a0d2a9123 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -97,18 +97,20 @@ public class TransportReindexAction extends HandledTransportAction { private final Client client; @@ -51,11 +51,10 @@ public class TransportUpdateByQueryAction extends HandledTransportAction) UpdateByQueryRequest::new); this.client = client; this.scriptService = scriptService; this.clusterService = clusterService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index 7b43d1c259b0c..1207300208f91 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; @@ -43,11 +42,10 @@ public class TransportNodesHotThreadsAction extends TransportNodesAction { @Inject - public TransportNodesHotThreadsAction(Settings settings, ThreadPool threadPool, - ClusterService clusterService, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + public TransportNodesHotThreadsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters) { super(settings, NodesHotThreadsAction.NAME, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, NodesHotThreadsRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeHotThreads.class); + NodesHotThreadsRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeHotThreads.class); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index afe535601fcfc..a422f33fd6efd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; @@ -44,12 +43,10 @@ public class TransportNodesInfoAction extends TransportNodesAction) RemoteInfoRequest::new); this.remoteClusterService = searchTransportService.getRemoteClusterService(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index 77578546b9585..79c7c776a196f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -63,12 +62,11 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction) SyncedFlushRequest::new); this.syncedFlushService = syncedFlushService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java index 8729b60e3bf5a..bbf0219fd70e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java @@ -41,14 +41,16 @@ public class TransportGetFieldMappingsAction extends HandledTransportAction { private final ClusterService clusterService; - private final TransportShardMultiGetAction shardAction; + private final IndexNameExpressionResolver indexNameExpressionResolver; @Inject public TransportMultiGetAction(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterService clusterService, TransportShardMultiGetAction shardAction, ActionFilters actionFilters, IndexNameExpressionResolver resolver) { - super(settings, MultiGetAction.NAME, threadPool, transportService, actionFilters, resolver, MultiGetRequest::new); + super(settings, MultiGetAction.NAME, threadPool, transportService, actionFilters, MultiGetRequest::new); this.clusterService = clusterService; this.shardAction = shardAction; + 
this.indexNameExpressionResolver = resolver; } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java index d660840e9b794..ea8f44d85f61b 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java @@ -22,8 +22,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.ingest.PipelineStore; @@ -39,8 +39,10 @@ public class SimulatePipelineTransportAction extends HandledTransportAction) SimulatePipelineRequest::new); this.pipelineStore = nodeService.getIngestService().getPipelineStore(); this.executionService = new SimulateExecutionService(threadPool); } diff --git a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java index d560a7ecc11d3..18e704be69c24 100644 --- a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java +++ b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -40,9 +39,8 @@ public class TransportMainAction extends HandledTransportAction searchAction, - IndexNameExpressionResolver resolver, int availableProcessors, LongSupplier relativeTimeProvider) { - super(Settings.EMPTY, MultiSearchAction.NAME, threadPool, transportService, actionFilters, resolver, MultiSearchRequest::new); + int availableProcessors, LongSupplier relativeTimeProvider) { + super(Settings.EMPTY, MultiSearchAction.NAME, threadPool, transportService, actionFilters, MultiSearchRequest::new); this.clusterService = clusterService; this.searchAction = searchAction; this.availableProcessors = availableProcessors; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index ad3b2efd42f1c..311ba02f523f4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -74,19 +75,22 @@ public class TransportSearchAction extends HandledTransportAction) 
SearchRequest::new); this.searchPhaseController = searchPhaseController; this.searchTransportService = searchTransportService; this.remoteClusterService = searchTransportService.getRemoteClusterService(); SearchTransportService.registerRequestHandler(transportService, searchService); this.clusterService = clusterService; this.searchService = searchService; + this.indexNameExpressionResolver = indexNameExpressionResolver; } private Map buildPerIndexAliasFilter(SearchRequest request, ClusterState clusterState, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 6f230c9bd8b89..77425ecd5dbb2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -22,9 +22,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -43,10 +43,9 @@ public class TransportSearchScrollAction extends HandledTransportAction) SearchScrollRequest::new); this.clusterService = clusterService; this.searchTransportService = searchTransportService; this.searchPhaseController = searchPhaseController; diff --git a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index c584db106992c..d6febf828765b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; @@ -39,29 +38,28 @@ public abstract class HandledTransportAction extends TransportAction { protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + ActionFilters actionFilters, Supplier request) { - this(settings, actionName, true, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + this(settings, actionName, true, threadPool, transportService, actionFilters, request); } protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, Writeable.Reader requestReader, - IndexNameExpressionResolver indexNameExpressionResolver) { - this(settings, actionName, true, threadPool, transportService, actionFilters, requestReader, indexNameExpressionResolver); + ActionFilters actionFilters, Writeable.Reader requestReader) { + this(settings, actionName, 
true, threadPool, transportService, actionFilters, requestReader);
     }
     protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters,
-                                     IndexNameExpressionResolver indexNameExpressionResolver, Supplier request) {
-        super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager());
+                                     Supplier request) {
+        super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager());
         transportService.registerRequestHandler(actionName, request, ThreadPool.Names.SAME, false, canTripCircuitBreaker, new TransportHandler());
     }
     protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters,
-                                     Writeable.Reader requestReader, IndexNameExpressionResolver indexNameExpressionResolver) {
-        super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager());
+                                     Writeable.Reader requestReader) {
+        super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager());
         transportService.registerRequestHandler(actionName, ThreadPool.Names.SAME, false, canTripCircuitBreaker, requestReader, new TransportHandler());
     }
diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
index 6207f333ceddf..c8d9849c2e58a 100644
--- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
@@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.tasks.Task;
@@ -39,16 +38,14 @@ public abstract class TransportAction request, Supplier shardRequest, String shardExecutor) {
-        super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request);
+        super(settings, actionName, threadPool, transportService, actionFilters, request);
         this.clusterService = clusterService;
         this.transportService = transportService;
+        this.indexNameExpressionResolver = indexNameExpressionResolver;
         this.transportShardAction = actionName + "[s]";
         transportService.registerRequestHandler(transportShardAction, shardRequest, shardExecutor, new ShardTransportHandler());
diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
index ff4e73acc1877..ca50e2acd147e 100644
--- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
@@ -81,6 +81,7 @@ public abstract class TransportBroadcastByNodeAction request, String executor, boolean canTripCircuitBreaker) {
-        super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, indexNameExpressionResolver,
+
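Taken together, the HandledTransportAction constructors end up with the following shape; the generic parameters shown here are an assumption, since the angle-bracketed text was dropped from this copy of the patch:

// Sketch only: the canTripCircuitBreaker variants and abstract members are elided.
public abstract class HandledTransportAction<Request extends ActionRequest, Response extends ActionResponse>
        extends TransportAction<Request, Response> {

    protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool,
                                     TransportService transportService, ActionFilters actionFilters,
                                     Supplier<Request> request) {
        this(settings, actionName, true, threadPool, transportService, actionFilters, request);
    }

    protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool,
                                     TransportService transportService, ActionFilters actionFilters,
                                     Writeable.Reader<Request> requestReader) {
        this(settings, actionName, true, threadPool, transportService, actionFilters, requestReader);
    }
    // neither variant accepts an IndexNameExpressionResolver any more; TransportAction's own constructor
    // is likewise reduced to (Settings, String, ThreadPool, ActionFilters, TaskManager)
}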
super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; + this.indexNameExpressionResolver = indexNameExpressionResolver; transportNodeBroadcastAction = actionName + "[n]"; diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 42d7da118460e..1881db0f13e42 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -56,6 +56,7 @@ public abstract class TransportMasterNodeAction, Response extends ActionResponse> extends HandledTransportAction { protected final TransportService transportService; protected final ClusterService clusterService; + protected final IndexNameExpressionResolver indexNameExpressionResolver; final String executor; @@ -74,10 +75,11 @@ protected TransportMasterNodeAction(Settings settings, String actionName, Transp protected TransportMasterNodeAction(Settings settings, String actionName, boolean canTripCircuitBreaker, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier request) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, indexNameExpressionResolver, + super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request); this.transportService = transportService; this.clusterService = clusterService; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); } @@ -85,10 +87,11 @@ protected TransportMasterNodeAction(Settings settings, String actionName, boolea TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, Writeable.Reader request, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request, - indexNameExpressionResolver); + super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request + ); this.transportService = transportService; this.clusterService = clusterService; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); } diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 0b61c7ed71247..d47e156680e28 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -63,11 +62,9 @@ public abstract class TransportNodesAction request, Supplier nodeRequest, - String 
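The intermediate base classes touched here (TransportMasterNodeAction above, and later TransportBroadcastReplicationAction, TransportReplicationAction, TransportInstanceSingleOperationAction and TransportSingleShardAction) keep accepting the resolver and now store it as a protected field themselves. A fragment of a hypothetical master-node subclass, showing that concrete actions can keep using that field unchanged; ExampleRequest / ExampleResponse and request.indices() are assumptions, not part of this patch:

@Override
protected void masterOperation(ExampleRequest request, ClusterState state,
                               ActionListener<ExampleResponse> listener) {
    // indexNameExpressionResolver is the protected field now declared on TransportMasterNodeAction itself
    String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(
            state, IndicesOptions.lenientExpandOpen(), request.indices());
    listener.onResponse(new ExampleResponse(concreteIndices.length));
}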
nodeExecutor, + Supplier request, Supplier nodeRequest, String nodeExecutor, Class nodeResponseClass) { - super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + super(settings, actionName, threadPool, transportService, actionFilters, request); this.clusterService = Objects.requireNonNull(clusterService); this.transportService = Objects.requireNonNull(transportService); this.nodeResponseClass = Objects.requireNonNull(nodeResponseClass); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index 4cad1c211700d..d3d54880f504f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -56,13 +56,15 @@ public abstract class TransportBroadcastReplicationAction request, Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportReplicationAction replicatedBroadcastShardAction) { - super(settings, name, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + super(settings, name, threadPool, transportService, actionFilters, request); this.replicatedBroadcastShardAction = replicatedBroadcastShardAction; this.clusterService = clusterService; + this.indexNameExpressionResolver = indexNameExpressionResolver; } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index e06c771481f42..97f985806168b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -104,6 +104,7 @@ public abstract class TransportReplicationAction< protected final ClusterService clusterService; protected final ShardStateAction shardStateAction; protected final IndicesService indicesService; + protected final IndexNameExpressionResolver indexNameExpressionResolver; protected final TransportRequestOptions transportOptions; protected final String executor; @@ -131,11 +132,12 @@ protected TransportReplicationAction(Settings settings, String actionName, Trans IndexNameExpressionResolver indexNameExpressionResolver, Supplier request, Supplier replicaRequest, String executor, boolean syncGlobalCheckpointAfterOperation) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); + super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); this.transportService = transportService; this.clusterService = clusterService; this.indicesService = indicesService; this.shardStateAction = shardStateAction; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor; this.transportPrimaryAction = actionName + "[p]"; diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java 
b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index b75828327035b..c907c12ac5161 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -52,6 +52,7 @@ public abstract class TransportInstanceSingleOperationAction { protected final ClusterService clusterService; protected final TransportService transportService; + protected final IndexNameExpressionResolver indexNameExpressionResolver; final String executor; final String shardActionName; @@ -59,9 +60,10 @@ public abstract class TransportInstanceSingleOperationAction request) { - super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + super(settings, actionName, threadPool, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); this.shardActionName = actionName + "[s]"; transportService.registerRequestHandler(shardActionName, request, executor, new ShardTransportHandler()); diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index 1a3a3c761c892..6c5d55c8c4404 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -61,8 +61,8 @@ public abstract class TransportSingleShardAction, Response extends ActionResponse> extends TransportAction { protected final ClusterService clusterService; - protected final TransportService transportService; + protected final IndexNameExpressionResolver indexNameExpressionResolver; final String transportShardAction; final String executor; @@ -70,9 +70,10 @@ public abstract class TransportSingleShardAction request, String executor) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); + super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); this.clusterService = clusterService; this.transportService = transportService; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.transportShardAction = actionName + "[s]"; this.executor = executor; diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index aad7d20073c3b..f852b5efb1aa3 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ -78,12 +77,10 @@ public abstract class 
TransportTasksAction< protected final String transportNodeAction; - protected TransportTasksAction(Settings settings, String actionName, ThreadPool threadPool, - ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, Supplier requestSupplier, - Supplier responseSupplier, - String nodeExecutor) { - super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, requestSupplier); + protected TransportTasksAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters, Supplier requestSupplier, + Supplier responseSupplier, String nodeExecutor) { + super(settings, actionName, threadPool, transportService, actionFilters, requestSupplier); this.clusterService = clusterService; this.transportService = transportService; this.transportNodeAction = actionName + "[n]"; diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index 1d164087ed023..9a3fc7b84c287 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -41,16 +41,17 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction { private final ClusterService clusterService; - private final TransportShardMultiTermsVectorAction shardAction; + private final IndexNameExpressionResolver indexNameExpressionResolver; @Inject public TransportMultiTermVectorsAction(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterService clusterService, TransportShardMultiTermsVectorAction shardAction, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, MultiTermVectorsAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, MultiTermVectorsRequest::new); + super(settings, MultiTermVectorsAction.NAME, threadPool, transportService, actionFilters, MultiTermVectorsRequest::new); this.clusterService = clusterService; this.shardAction = shardAction; + this.indexNameExpressionResolver = indexNameExpressionResolver; } @Override diff --git a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index 4e498d393e2e1..0a01c7cdabbe3 100644 --- a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -30,7 +30,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -56,12 +55,10 @@ public class TransportNodesListGatewayMetaState extends TransportNodesAction { protected FakeTransportAction(Settings settings, String actionName, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver 
indexNameExpressionResolver, TaskManager taskManager) { - super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, taskManager); + super(settings, actionName, threadPool, actionFilters, taskManager); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 4baf184e22b96..4cb9cd27e7fc7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -32,7 +32,6 @@ import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -148,8 +147,8 @@ abstract class AbstractTestNodesAction request, Supplier nodeRequest) { super(settings, actionName, threadPool, clusterService, transportService, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), - request, nodeRequest, ThreadPool.Names.GENERIC, NodeResponse.class); + new ActionFilters(new HashSet<>()), + request, nodeRequest, ThreadPool.Names.GENERIC, NodeResponse.class); } @Override @@ -192,12 +191,10 @@ protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool transportService.start(); clusterService = createClusterService(threadPool, discoveryNode.get()); clusterService.addStateApplier(transportService.getTaskManager()); - IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(settings); ActionFilters actionFilters = new ActionFilters(emptySet()); - transportListTasksAction = new TransportListTasksAction(settings, threadPool, clusterService, transportService, - actionFilters, indexNameExpressionResolver); + transportListTasksAction = new TransportListTasksAction(settings, threadPool, clusterService, transportService, actionFilters); transportCancelTasksAction = new TransportCancelTasksAction(settings, threadPool, clusterService, - transportService, actionFilters, indexNameExpressionResolver); + transportService, actionFilters); transportService.acceptIncomingRequests(); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index bac8f4a8730da..0cfe532b8a012 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -37,7 +37,6 @@ import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -269,8 +268,8 @@ public static class TransportTestTaskAction extends TransportNodesAction()), new IndexNameExpressionResolver(Settings.EMPTY), - 
NodesRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeResponse.class); + new ActionFilters(new HashSet<>()), + NodesRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, NodeResponse.class); } @Override @@ -429,7 +428,7 @@ public TransportUnblockTestTasksAction(Settings settings,ThreadPool threadPool, clusterService, TransportService transportService) { super(settings, UnblockTestTasksAction.NAME, threadPool, clusterService, transportService, new ActionFilters(new - HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + HashSet<>()), UnblockTestTasksRequest::new, UnblockTestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index fd6f68d4200da..33b815e4fbf22 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.action.support.tasks.TransportTasksAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -258,7 +257,7 @@ abstract static class TestTasksAction extends TransportTasksAction()), - new IndexNameExpressionResolver(Settings.EMPTY), TestTasksRequest::new, TestTasksResponse::new, + TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } diff --git a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java index 035cc0be4a5e5..1c1c0f9476de3 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; @@ -70,7 +69,7 @@ public void testMainActionClusterAvailable() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportMainAction action = new TransportMainAction(settings, mock(ThreadPool.class), transportService, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), clusterService); + clusterService); AtomicReference responseRef = new AtomicReference<>(); action.doExecute(new MainRequest(), new ActionListener() { @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index cccc0219f2242..39e9ec805e070 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -149,7 +149,7 @@ public TaskManager getTaskManager() { final Set requests = Collections.newSetFromMap(Collections.synchronizedMap(new IdentityHashMap<>())); TransportAction searchAction = new TransportAction(Settings.EMPTY, - "action", threadPool, actionFilters, resolver, taskManager) { + "action", threadPool, actionFilters, taskManager) { @Override protected void doExecute(SearchRequest request, ActionListener listener) { requests.add(request); @@ -161,7 +161,7 @@ protected void doExecute(SearchRequest request, ActionListener l }; if (controlledClock) { - return new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, resolver, + return new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, availableProcessors, expected::get) { @Override void executeSearch(final Queue requests, final AtomicArray responses, @@ -171,7 +171,7 @@ void executeSearch(final Queue requests, final AtomicArray requests = Collections.newSetFromMap(Collections.synchronizedMap(new IdentityHashMap<>())); TransportAction searchAction = new TransportAction - (Settings.EMPTY, "action", threadPool, actionFilters, resolver, taskManager) { + (Settings.EMPTY, "action", threadPool, actionFilters, taskManager) { @Override protected void doExecute(SearchRequest request, ActionListener listener) { requests.add(request); @@ -126,7 +126,7 @@ protected void doExecute(SearchRequest request, ActionListener l }; TransportMultiSearchAction action = - new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, resolver, 10, + new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, 10, System::nanoTime); // Execute the multi search api and fail if we find an error after executing: diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index d576d440c0263..3a31422dcf83f 100644 --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -80,7 +80,7 @@ public void testActionFiltersRequest() throws ExecutionException, InterruptedExc String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = - new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, + new TransportAction(Settings.EMPTY, actionName, null, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override protected void doExecute(TestRequest request, ActionListener listener) { @@ -158,7 +158,7 @@ public void exe String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = new TransportAction(Settings.EMPTY, - actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { + actionName, null, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); diff --git 
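The test changes follow mechanically from the constructor change: the resolver (or its null placeholder) argument simply disappears. As a sketch adapted from TransportActionFilterChainTests above, a bare TransportAction can now be stubbed in a test like this; the generic parameters are an assumption, since they were stripped from this copy of the patch:

TransportAction<TestRequest, TestResponse> transportAction =
        new TransportAction<TestRequest, TestResponse>(Settings.EMPTY, actionName, threadPool, actionFilters,
                new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) {
            @Override
            protected void doExecute(TestRequest request, ActionListener<TestResponse> listener) {
                listener.onResponse(new TestResponse());
            }
        };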
a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index 60a46876a7126..8a79da044b915 100644 --- a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -250,7 +250,7 @@ private static class TestTransportNodesAction transportService, ActionFilters actionFilters, Supplier request, Supplier nodeRequest, String nodeExecutor) { super(settings, "indices:admin/test", threadPool, clusterService, transportService, actionFilters, - null, request, nodeRequest, nodeExecutor, TestNodeResponse.class); + request, nodeRequest, nodeExecutor, TestNodeResponse.class); } @Override diff --git a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index dc1f146b452de..a289e9680b4aa 100644 --- a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -59,7 +59,7 @@ private Actions(Settings settings, ThreadPool threadPool, Action[] actions) { private static class InternalTransportAction extends TransportAction { private InternalTransportAction(Settings settings, String actionName, ThreadPool threadPool) { - super(settings, actionName, threadPool, EMPTY_FILTERS, null, new TaskManager(settings, threadPool, Collections.emptySet())); + super(settings, actionName, threadPool, EMPTY_FILTERS, new TaskManager(settings, threadPool, Collections.emptySet())); } @Override diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java index ddcda1058039c..e54641bef2f54 100644 --- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java +++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java @@ -514,8 +514,8 @@ public static class TransportTestTaskAction extends TransportTasksAction featureSets) { - super(settings, XPackInfoAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, - XPackInfoRequest::new); + ActionFilters actionFilters, LicenseService licenseService, Set featureSets) { + super(settings, XPackInfoAction.NAME, threadPool, transportService, actionFilters, + XPackInfoRequest::new); this.licenseService = licenseService; this.featureSets = featureSets; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java index 3670efeeeeee9..16e2a74dac81a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.threadpool.ThreadPool; @@ -28,10 +27,9 @@ public class TransportGetCertificateInfoAction extends HandledTransportAction null, null, Collections.emptySet()); TransportXPackInfoAction action = new TransportXPackInfoAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), licenseService, featureSets); + mock(ActionFilters.class), licenseService, featureSets); License license = mock(License.class); long expiryDate = randomLong(); diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 994f5c485780f..07035967d2abf 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -58,6 +57,7 @@ import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Supplier; /** * Performs a series of elasticsearch queries and aggregations to explore @@ -83,10 +83,10 @@ protected boolean lessThan(Vertex a, Vertex b) { @Inject public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, TransportSearchAction transportSearchAction, - TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + TransportService transportService, ActionFilters actionFilters, XPackLicenseState licenseState) { - super(settings, GraphExploreAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, - GraphExploreRequest::new); + super(settings, GraphExploreAction.NAME, threadPool, transportService, actionFilters, + (Supplier)GraphExploreRequest::new); this.searchAction = transportSearchAction; this.licenseState = licenseState; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java index 083d4ce5b1514..bc1d50c7cd99d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -64,13 +63,12 @@ public class TransportCloseJobAction extends TransportTasksAction) DeleteCalendarAction.Request::new); this.client = client; this.jobManager = jobManager; this.jobProvider = jobProvider; diff --git 
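Several x-pack call sites in this patch now cast the constructor reference, for example (Supplier) GraphExploreRequest::new in TransportGraphExploreAction above. The likely reason, stated here as an assumption, is that HandledTransportAction now offers both a Supplier and a Writeable.Reader overload of the same arity, so the cast picks the Supplier one explicitly; the generic parameter below is restored as an assumption, since angle brackets were lost from this copy of the patch:

@Inject
public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, TransportSearchAction transportSearchAction,
                                   TransportService transportService, ActionFilters actionFilters,
                                   XPackLicenseState licenseState) {
    // the explicit cast selects the Supplier-based constructor
    super(settings, GraphExploreAction.NAME, threadPool, transportService, actionFilters,
            (Supplier<GraphExploreRequest>) GraphExploreRequest::new);
    this.searchAction = transportSearchAction;
    this.licenseState = licenseState;
}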
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 91ae2c118c8cb..2e4b688fa2619 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; @@ -44,10 +43,9 @@ public class TransportDeleteCalendarEventAction extends HandledTransportAction) DeleteFilterAction.Request::new); this.clusterService = clusterService; this.client = client; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java index 36abf5f95d4dc..23ca3693df632 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -39,10 +38,10 @@ public class TransportDeleteModelSnapshotAction extends HandledTransportAction) GetBucketsAction.Request::new); this.jobProvider = jobProvider; this.jobManager = jobManager; this.client = client; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java index c81bb2642236d..da2d2d7970fc6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -27,6 +26,7 @@ import java.util.Collections; import java.util.List; +import java.util.function.Supplier; public class TransportGetCalendarEventsAction extends HandledTransportAction { @@ -35,12 +35,10 @@ public class TransportGetCalendarEventsAction extends HandledTransportAction) GetCalendarEventsAction.Request::new); this.jobProvider = jobProvider; this.clusterService = clusterService; } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java index 46cfac5519d62..5645d1e1f2d26 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -27,12 +26,10 @@ public class TransportGetCalendarsAction extends HandledTransportAction) GetCategoriesAction.Request::new); this.jobProvider = jobProvider; this.client = client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index 4264fa2fc2f57..28d75956df059 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; @@ -50,12 +49,10 @@ public class TransportGetFiltersAction extends HandledTransportAction) GetInfluencersAction.Request::new); this.jobProvider = jobProvider; this.client = client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index 78bfe2c7bc6b0..c0b383b55ced0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; @@ -56,10 +55,9 @@ public class TransportGetJobsStatsAction extends TransportTasksAction) GetOverallBucketsAction.Request::new); this.clusterService = clusterService; this.client = client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java index 879b8c5bba029..7c5fee97d5647 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java @@ -5,11 +5,12 @@ */ package org.elasticsearch.xpack.ml.action; +import java.util.function.Supplier; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -27,10 +28,9 @@ public class TransportGetRecordsAction extends HandledTransportAction) GetRecordsAction.Request::new); this.jobProvider = jobProvider; this.jobManager = jobManager; this.client = client; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java index 8916f6ba084fc..0d3b8dfa38dbe 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -35,10 +34,9 @@ public class TransportIsolateDatafeedAction extends TransportTasksAction requestSupplier, + Supplier requestSupplier, Supplier responseSupplier, String nodeExecutor, AutodetectProcessManager processManager) { - super(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - requestSupplier, responseSupplier, nodeExecutor); + super(settings, actionName, threadPool, clusterService, transportService, actionFilters, + requestSupplier, responseSupplier, nodeExecutor); this.processManager = processManager; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java index 9d2eae1073722..cc2f70eadeae5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ -37,10 +36,9 @@ public class TransportKillProcessAction extends TransportJobTaskAction { @@ -31,10 +31,9 @@ public class TransportMlInfoAction extends HandledTransportAction) MlInfoAction.Request::new); this.clusterService = clusterService; } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java index 113a8da7be31a..1fbbb7a368152 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ -31,11 +30,9 @@ public class TransportPersistJobAction extends TransportJobTaskAction { @@ -41,10 +41,9 @@ public class TransportPreviewDatafeedAction extends HandledTransportAction) PreviewDatafeedAction.Request::new); this.client = client; this.clusterService = clusterService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java index 1393d663fb251..c135ab8322b05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -30,6 +29,7 @@ import java.io.IOException; import java.util.Collections; +import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -39,12 +39,10 @@ public class TransportPutCalendarAction extends HandledTransportAction) PutCalendarAction.Request::new); this.client = client; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index fc14ef085dd33..a8cd2cc8134a5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -30,6 +29,7 @@ import java.io.IOException; import java.util.Collections; +import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -42,10 +42,9 @@ public class TransportPutFilterAction extends 
HandledTransportAction) PutFilterAction.Request::new); this.client = client; this.jobManager = jobManager; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java index faf6aa80b7a6f..e7455053d525d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -53,11 +52,10 @@ public class TransportStopDatafeedAction extends TransportTasksAction) ValidateDetectorAction.Request::new); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java index 9cd1e5e6aca31..990c673a8c1ed 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java @@ -5,10 +5,11 @@ */ package org.elasticsearch.xpack.ml.action; +import java.util.function.Supplier; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -20,9 +21,9 @@ public class TransportValidateJobConfigAction extends HandledTransportAction) ValidateJobConfigAction.Request::new); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java index 0e7ad29c54da9..27d3a68d00529 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -265,7 +264,7 @@ public void testDoExecute_whenNothingToClose() { when(clusterService.state()).thenReturn(clusterState); TransportCloseJobAction transportAction = new TransportCloseJobAction(Settings.EMPTY, - mock(TransportService.class), mock(ThreadPool.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), + mock(TransportService.class), mock(ThreadPool.class), mock(ActionFilters.class), clusterService, mock(Client.class), 
mock(Auditor.class), mock(PersistentTasksService.class)); AtomicBoolean gotResponse = new AtomicBoolean(false); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java index 0f7ad755c5875..b30d8b357c196 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; @@ -41,11 +40,9 @@ public class TransportMonitoringBulkAction extends HandledTransportAction { private final ClusterService clusterService; @Inject - public TransportGetRollupCapsAction(Settings settings, - TransportService transportService, - ClusterService clusterService, - ThreadPool threadPool, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { + public TransportGetRollupCapsAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters) { super(settings, GetRollupCapsAction.NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, GetRollupCapsAction.Request::new); + (Supplier) GetRollupCapsAction.Request::new); this.clusterService = clusterService; } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java index 283f3a3742324..b0adf6f12b486 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ -40,12 +39,9 @@ public class TransportGetRollupJobAction extends TransportTasksAction) AuthenticateRequest::new); this.securityContext = securityContext; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 047b47dfa256b..8f0256b7e7773 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -8,7 +8,6 @@ import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -27,10 +26,8 @@ public class TransportChangePasswordAction extends HandledTransportAction) DeleteUserRequest::new); this.usersStore = usersStore; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index 4a57a918c1af0..f40db20a339ef 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -38,10 +37,8 @@ public class TransportGetUsersAction extends HandledTransportAction null, null, Collections.emptySet()); TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); request.name(roleName); @@ -77,7 +76,7 @@ public void testValidRole() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); request.name(roleName); @@ -121,7 +120,7 @@ public void testException() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); request.name(roleName); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java index 431d6cc613c16..9c19bf2097d22 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java @@ -8,7 +8,6 @@ import 
org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; @@ -46,7 +45,7 @@ public void testReservedRoles() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); + rolesStore, transportService, new ReservedRolesStore()); final int size = randomIntBetween(1, ReservedRolesStore.names().size()); final List names = randomSubsetOf(size, ReservedRolesStore.names()); @@ -92,7 +91,7 @@ public void testStoreRoles() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); + rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); request.names(storeRoleDescriptors.stream().map(RoleDescriptor::getName).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY)); @@ -144,7 +143,7 @@ public void testGetAllOrMix() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); + rolesStore, transportService, new ReservedRolesStore()); final List expectedNames = new ArrayList<>(); if (all) { @@ -208,7 +207,7 @@ public void testException() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService, new ReservedRolesStore()); + rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); request.names(storeRoleDescriptors.stream().map(RoleDescriptor::getName).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java index 0ae2477ba0310..94a69cc044253 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java @@ -8,7 +8,6 @@ import 
org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -47,7 +46,7 @@ public void testReservedRole() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); @@ -78,7 +77,7 @@ public void testValidRole() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); final boolean created = randomBoolean(); PutRoleRequest request = new PutRoleRequest(); @@ -122,7 +121,7 @@ public void testException() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), rolesStore, transportService); + rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java index be3c86d6a6a52..cc67a4facb0ed 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; @@ -47,9 +46,8 @@ public void setupMocks() { store = mock(NativeRoleMappingStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - action = new TransportGetRoleMappingsAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), - transportService, store); + action = new TransportGetRoleMappingsAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + transportService, store); namesRef = new 
AtomicReference<>(null); result = Collections.emptyList(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index da9eca7a9b61a..3ba584440bb42 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -44,8 +43,7 @@ public void setupMocks() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); action = new TransportPutRoleMappingAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), - transportService, store); + mock(ActionFilters.class), transportService, store); requestRef = new AtomicReference<>(null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 76d888d2c2e61..85d1d4a161d1d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; @@ -173,7 +172,7 @@ void doExecute(Action action, Request request, ActionListener null, null, Collections.emptySet()); final Realms realms = mock(Realms.class); action = new TransportSamlInvalidateSessionAction(settings, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), tokenService, realms); + mock(ActionFilters.class),tokenService, realms); final Path metadata = PathUtils.get(SamlRealm.class.getResource("idp1.xml").toURI()); final Environment env = TestEnvironment.newEnvironment(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index eca52831d9adc..79d4978cfd248 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; @@ -184,8 +183,7 @@ public void setup() throws Exception { final TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); final Realms realms = mock(Realms.class); - action = new TransportSamlLogoutAction(settings, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), realms, tokenService); + action = new TransportSamlLogoutAction(settings, threadPool, transportService, mock(ActionFilters.class), realms, tokenService); final Path metadata = PathUtils.get(SamlRealm.class.getResource("idp1.xml").toURI()); final Environment env = TestEnvironment.newEnvironment(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java index 56e714d7a7067..20af681f477ec 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -41,7 +40,7 @@ public void testInternalUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), securityContext); + mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -67,7 +66,7 @@ public void testNullUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), securityContext); + mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -95,7 +94,7 @@ public void testValidUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, 
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), securityContext); + mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java index 78f6fd26e93ea..bc1c42f66a55b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; @@ -53,7 +52,7 @@ public void testAnonymousUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportChangePasswordAction action = new TransportChangePasswordAction(settings, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); request.username(anonymousUser.principal()); @@ -84,7 +83,7 @@ public void testInternalUsers() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -126,7 +125,7 @@ public void testValidUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -167,7 +166,7 @@ public Void answer(InvocationOnMock invocation) { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportChangePasswordAction action = new 
TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java index a60a82e87d71a..dab63fcc31336 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -50,7 +49,7 @@ public void testAnonymousUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportDeleteUserAction action = new TransportDeleteUserAction(settings, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(new AnonymousUser(settings).principal()); @@ -79,7 +78,7 @@ public void testInternalUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -109,7 +108,7 @@ public void testReservedUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(reserved.principal()); @@ -139,7 +138,7 @@ public void testValidUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); final boolean found = randomBoolean(); final DeleteUserRequest request = new DeleteUserRequest(user.principal()); @@ -180,7 +179,7 @@ public void 
testException() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); final DeleteUserRequest request = new DeleteUserRequest(user.principal()); doAnswer(new Answer() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index 2ad467236820f..fdb37b2f5bd8c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; @@ -93,7 +92,7 @@ public void testAnonymousUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, reservedRealm); + usersStore, transportService, reservedRealm); GetUsersRequest request = new GetUsersRequest(); request.usernames(anonymousUser.principal()); @@ -128,7 +127,7 @@ public void testInternalUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, mock(ReservedRealm.class)); + usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); request.usernames(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -171,7 +170,7 @@ public void testReservedUsersOnly() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, reservedRealm); + usersStore, transportService, reservedRealm); logger.error("names {}", names); GetUsersRequest request = new GetUsersRequest(); @@ -211,7 +210,7 @@ public void testGetAllUsers() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, 
mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, reservedRealm); + usersStore, transportService, reservedRealm); GetUsersRequest request = new GetUsersRequest(); doAnswer(new Answer() { @@ -258,7 +257,7 @@ public void testGetStoreOnlyUsers() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, mock(ReservedRealm.class)); + usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); request.usernames(storeUsernames); @@ -306,7 +305,7 @@ public void testException() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService, mock(ReservedRealm.class)); + usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); request.usernames(storeUsernames); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java index d4a256b8a0ca8..cb0f643fd89d7 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.Settings; @@ -75,8 +74,7 @@ public void setup() { return null; }).when(authorizationService).roles(eq(user), any(ActionListener.class)); - action = new TransportHasPrivilegesAction(settings, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), authorizationService); + action = new TransportHasPrivilegesAction(settings, threadPool, transportService, mock(ActionFilters.class), authorizationService); } /** diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java index d059911a6807a..140508b51a1b0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -60,7 +59,7 @@ public void testAnonymousUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutUserAction action = new TransportPutUserAction(settings, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(anonymousUser.principal()); @@ -90,7 +89,7 @@ public void testSystemUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -131,7 +130,7 @@ public void testReservedUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(reserved.principal()); @@ -161,7 +160,7 @@ public void testValidUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); final boolean isCreate = randomBoolean(); final PutUserRequest request = new PutUserRequest(); @@ -207,7 +206,7 @@ public void testException() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore, transportService); + usersStore, transportService); final PutUserRequest request = new PutUserRequest(); request.username(user.principal()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java index 09fd90437523c..a8076c21cdb49 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java @@ -9,7 +9,6 @@ 
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; @@ -65,7 +64,7 @@ public void testAnonymousUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportSetEnabledAction action = new TransportSetEnabledAction(settings, threadPool, transportService, mock(ActionFilters.class), - mock(IndexNameExpressionResolver.class), usersStore); + usersStore); SetEnabledRequest request = new SetEnabledRequest(); request.username(new AnonymousUser(settings).principal()); @@ -103,7 +102,7 @@ public void testInternalUser() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); SetEnabledRequest request = new SetEnabledRequest(); request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -157,7 +156,7 @@ public Void answer(InvocationOnMock invocation) { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -209,7 +208,7 @@ public Void answer(InvocationOnMock invocation) { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); @@ -249,7 +248,7 @@ public void testUserModifyingThemselves() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); TransportSetEnabledAction action = new TransportSetEnabledAction(Settings.EMPTY, threadPool, transportService, - mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), usersStore); + mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index 68752928166ea..3cbb2f8a1bc93 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -8,8 +8,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -25,13 +25,10 @@ public class TransportSqlClearCursorAction extends HandledTransportAction) SqlClearCursorRequest::new); this.planExecutor = planExecutor; this.sqlLicenseChecker = sqlLicenseChecker; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 46429e2d50829..044683a29ad67 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -9,9 +9,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -34,13 +34,10 @@ public class TransportSqlQueryAction extends HandledTransportAction) SqlQueryRequest::new); this.planExecutor = planExecutor; this.sqlLicenseChecker = sqlLicenseChecker; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java index 8f494231727eb..0df3b2ad1bb50 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java @@ -8,8 +8,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -26,11 +26,10 @@ public class TransportSqlTranslateAction extends HandledTransportAction) SqlTranslateRequest::new); this.planExecutor = planExecutor; this.sqlLicenseChecker = sqlLicenseChecker; diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java index 363857f2766d3..cdb1479eec5e3 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseUtils; @@ -26,9 +25,8 @@ public abstract class WatcherTransportAction request) { - super(settings, actionName, threadPool, transportService, actionFilters, request, indexNameExpressionResolver); + ActionFilters actionFilters, XPackLicenseState licenseState, Writeable.Reader request) { + super(settings, actionName, threadPool, transportService, actionFilters, request); this.licenseState = licenseState; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java index 52c63cab69cab..6a31b0e5cb054 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.Preference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -56,10 +55,9 @@ public class TransportAckWatchAction extends WatcherTransportAction) DeleteWatchRequest::new); this.client = client; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java index 2a199c2b3eb7b..6ccc7518d8b4c 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.Preference; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; @@ -66,11 +65,10 @@ public class TransportExecuteWatchAction extends WatcherTransportAction Date: Wed, 20 Jun 2018 10:23:39 +0200 Subject: [PATCH 41/92] Fix use of time zone in date_histogram 
rewrite (#31407) Currently, DateHistogramAggregationBuilder#rewriteTimeZone uses the aggregation date math parser and time zone to check whether all values in a reader have the same timezone to speed up computation. However, the upper and lower bounds to check are retrieved as longs in epoch_millis, so they don't need to be parsed using a time zone or a parser other than "epoch_millis". This changes this behaviour, which was causing problems when the field type mapping specified only "epoch_millis" as a format but a timezone other than UTC was used. Closes #31392 --- .../DateHistogramAggregationBuilder.java | 14 ++++---- .../aggregations/bucket/DateHistogramIT.java | 34 ++++++++++++++++++- 2 files changed, 40 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java index bb391f21f1e40..bb785efde488e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java @@ -25,6 +25,8 @@ import org.apache.lucene.search.DocIdSetIterator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.rounding.DateTimeUnit; import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.common.unit.TimeValue; @@ -36,7 +38,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -59,6 +60,7 @@ import java.io.IOException; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -70,6 +72,7 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuilder implements MultiBucketAggregationBuilder { public static final String NAME = "date_histogram"; + private static DateMathParser EPOCH_MILLIS_PARSER = new DateMathParser(Joda.forPattern("epoch_millis", Locale.ROOT)); public static final Map DATE_FIELD_UNITS; @@ -380,7 +383,7 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException { Long anyInstant = null; final IndexNumericFieldData fieldData = context.getForField(ft); for (LeafReaderContext ctx : reader.leaves()) { - AtomicNumericFieldData leafFD = ((IndexNumericFieldData) fieldData).load(ctx); + AtomicNumericFieldData leafFD = fieldData.load(ctx); SortedNumericDocValues values = leafFD.getLongValues(); if (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { anyInstant = values.nextValue(); @@ -406,11 +409,8 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException { // rounding rounds down, so 'nextTransition' is a good upper bound final long high = nextTransition; - final DocValueFormat format = ft.docValueFormat(null, null); - final Object formattedLow = format.format(low); - final Object formattedHigh =
format.format(high); - if (ft.isFieldWithinQuery(reader, formattedLow, formattedHigh, - true, false, tz, null, context) == Relation.WITHIN) { + if (ft.isFieldWithinQuery(reader, low, high, true, false, DateTimeZone.UTC, EPOCH_MILLIS_PARSER, + context) == Relation.WITHIN) { // All values in this reader have the same offset despite daylight saving times. // This is very common for location-based timezones such as Europe/Paris in // combination with time-based indices. diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index a4a561cfee35f..26e6f4c076553 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; @@ -41,7 +42,6 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import org.joda.time.DateTime; @@ -1341,6 +1341,38 @@ public void testExceptionOnNegativeInterval() { } } + /** + * https://github.com/elastic/elasticsearch/issues/31392 demonstrates an edge case where a date field mapping with + * "format" = "epoch_millis" can lead for the date histogram aggregation to throw an error if a non-UTC time zone + * with daylight savings time is used. 
This test was added to check that this now works + * @throws ExecutionException + * @throws InterruptedException + */ + public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, ExecutionException { + String index = "test31392"; + assertAcked(client().admin().indices().prepareCreate(index).addMapping("type", "d", "type=date,format=epoch_millis").get()); + indexRandom(true, client().prepareIndex(index, "type").setSource("d", "1477954800000")); + ensureSearchable(index); + SearchResponse response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d") + .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin"))).execute().actionGet(); + assertSearchResponse(response); + Histogram histo = response.getAggregations().get("histo"); + assertThat(histo.getBuckets().size(), equalTo(1)); + assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000")); + assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L)); + + response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d") + .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin")).format("yyyy-MM-dd")) + .execute().actionGet(); + assertSearchResponse(response); + histo = response.getAggregations().get("histo"); + assertThat(histo.getBuckets().size(), equalTo(1)); + assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("2016-11-01")); + assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L)); + + internalCluster().wipeIndices(index); + } + /** * When DST ends, local time turns back one hour, so between 2am and 4am wall time we should have four buckets: "2015-10-25T02:00:00.000+02:00", From 5683bc60a69f928ac37431a8410fc0527381dbbf Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 20 Jun 2018 10:16:26 +0100 Subject: [PATCH 42/92] Multiplexing token filter (#31208) The `multiplexer` filter emits multiple tokens at the same position, each version of the token having been passed through a different filter chain. Identical tokens at the same position are removed. This allows users to, for example, index lowercase and original-case tokens, or stemmed and unstemmed versions, in the same field, so that they can search for a stemmed term within x positions of an unstemmed term.
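For readers who want to see the position-stacking mechanism in isolation, the sketch below is an editorial illustration only and is not part of this patch: it uses plain core Lucene classes to emit a lowercase variant of every token at the same position as the original. The `multiplexer` filter added by this patch generalizes the idea to arbitrary filter chains and then strips identical stacked tokens with `RemoveDuplicatesTokenFilter`; the class name `LowercaseVariantFilter` is invented for the example.

[source,java]
----
import java.io.IOException;
import java.util.Locale;

import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;

/**
 * Illustration only: after each input token, also emit its lowercase form with a
 * position increment of zero, so both variants occupy the same position.
 */
public final class LowercaseVariantFilter extends TokenFilter {

    private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
    private final PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class);
    private State pending; // captured original token that still owes a stacked variant

    public LowercaseVariantFilter(TokenStream input) {
        super(input);
    }

    @Override
    public boolean incrementToken() throws IOException {
        if (pending != null) {
            // Replay the previous token as a lowercase variant at the same position
            restoreState(pending);
            pending = null;
            String lower = termAtt.toString().toLowerCase(Locale.ROOT);
            termAtt.setEmpty().append(lower);
            posIncAtt.setPositionIncrement(0);
            return true;
        }
        if (input.incrementToken() == false) {
            return false;
        }
        pending = captureState(); // emit the original now, its variant on the next call
        return true;
    }

    @Override
    public void reset() throws IOException {
        super.reset();
        pending = null;
    }
}
----

When a token is already lowercase this sketch would stack an identical duplicate; the patch avoids that by wrapping the multiplexed stream in a RemoveDuplicatesTokenFilter, which is why the duplicate `home` token emitted by the stemmer is absent from the documentation example in the diff below.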
--- docs/reference/analysis/tokenfilters.asciidoc | 2 + .../multiplexer-tokenfilter.asciidoc | 116 +++++++++++ .../analysis/common/CommonAnalysisPlugin.java | 1 + .../common/MultiplexerTokenFilterFactory.java | 195 ++++++++++++++++++ .../common/MultiplexerTokenFilterTests.java | 106 ++++++++++ .../index/analysis/AnalysisRegistry.java | 13 +- .../analysis/ReferringFilterFactory.java | 37 ++++ .../index/analysis/AnalysisRegistryTests.java | 1 - 8 files changed, 469 insertions(+), 2 deletions(-) create mode 100644 docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc create mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java create mode 100644 server/src/main/java/org/elasticsearch/index/analysis/ReferringFilterFactory.java diff --git a/docs/reference/analysis/tokenfilters.asciidoc b/docs/reference/analysis/tokenfilters.asciidoc index dd5cb2e702cff..ee891fdd09aa7 100644 --- a/docs/reference/analysis/tokenfilters.asciidoc +++ b/docs/reference/analysis/tokenfilters.asciidoc @@ -35,6 +35,8 @@ include::tokenfilters/word-delimiter-tokenfilter.asciidoc[] include::tokenfilters/word-delimiter-graph-tokenfilter.asciidoc[] +include::tokenfilters/multiplexer-tokenfilter.asciidoc[] + include::tokenfilters/stemmer-tokenfilter.asciidoc[] include::tokenfilters/stemmer-override-tokenfilter.asciidoc[] diff --git a/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc new file mode 100644 index 0000000000000..51937084e3984 --- /dev/null +++ b/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc @@ -0,0 +1,116 @@ +[[analysis-multiplexer-tokenfilter]] +=== Multiplexer Token Filter + +A token filter of type `multiplexer` will emit multiple tokens at the same position, +each version of the token having been run through a different filter. Identical +output tokens at the same position will be removed. + +WARNING: If the incoming token stream has duplicate tokens, then these will also be +removed by the multiplexer + +[float] +=== Options +[horizontal] +filters:: a list of token filters to apply to incoming tokens. These can be any + token filters defined elsewhere in the index mappings. Filters can be chained + using a comma-delimited string, so for example `"lowercase, porter_stem"` would + apply the `lowercase` filter and then the `porter_stem` filter to a single token. 
+ +WARNING: Shingle or multi-word synonym token filters will not function normally + when they are declared in the filters array because they read ahead internally + which is unsupported by the multiplexer + +preserve_original:: if `true` (the default) then emit the original token in + addition to the filtered tokens + + +[float] +=== Settings example + +You can set it up like: + +[source,js] +-------------------------------------------------- +PUT /multiplexer_example +{ + "settings" : { + "analysis" : { + "analyzer" : { + "my_analyzer" : { + "tokenizer" : "standard", + "filter" : [ "my_multiplexer" ] + } + }, + "filter" : { + "my_multiplexer" : { + "type" : "multiplexer", + "filters" : [ "lowercase", "lowercase, porter_stem" ] + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +And test it like: + +[source,js] +-------------------------------------------------- +POST /multiplexer_example/_analyze +{ + "analyzer" : "my_analyzer", + "text" : "Going HOME" +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +And it'd respond: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "Going", + "start_offset": 0, + "end_offset": 5, + "type": "", + "position": 0 + }, + { + "token": "going", + "start_offset": 0, + "end_offset": 5, + "type": "", + "position": 0 + }, + { + "token": "go", + "start_offset": 0, + "end_offset": 5, + "type": "", + "position": 0 + }, + { + "token": "HOME", + "start_offset": 6, + "end_offset": 10, + "type": "", + "position": 1 + }, + { + "token": "home", <1> + "start_offset": 6, + "end_offset": 10, + "type": "", + "position": 1 + } + ] +} +-------------------------------------------------- +// TESTRESPONSE + +<1> The stemmer has also emitted a token `home` at position 1, but because it is a +duplicate of this token it has been removed from the token stream \ No newline at end of file diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index cdd8101a73c70..ca2f74b5efee0 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -226,6 +226,7 @@ public Map> getTokenFilters() { filters.put("limit", LimitTokenCountFilterFactory::new); filters.put("lowercase", LowerCaseTokenFilterFactory::new); filters.put("min_hash", MinHashTokenFilterFactory::new); + filters.put("multiplexer", MultiplexerTokenFilterFactory::new); filters.put("ngram", NGramTokenFilterFactory::new); filters.put("nGram", NGramTokenFilterFactory::new); filters.put("pattern_capture", requriesAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new)); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java new file mode 100644 index 0000000000000..1cf5303a77209 --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java @@ -0,0 +1,195 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.analysis.common;
+
+import org.apache.lucene.analysis.TokenFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter;
+import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter;
+import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
+import org.elasticsearch.index.analysis.ReferringFilterFactory;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
+public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory implements ReferringFilterFactory {
+
+    private List<TokenFilterFactory> filters;
+    private List<String> filterNames;
+    private final boolean preserveOriginal;
+
+    private static final TokenFilterFactory IDENTITY_FACTORY = new TokenFilterFactory() {
+        @Override
+        public String name() {
+            return "identity";
+        }
+
+        @Override
+        public TokenStream create(TokenStream tokenStream) {
+            return tokenStream;
+        }
+    };
+
+    public MultiplexerTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) throws IOException {
+        super(indexSettings, name, settings);
+        this.filterNames = settings.getAsList("filters");
+        this.preserveOriginal = settings.getAsBoolean("preserve_original", true);
+    }
+
+    @Override
+    public TokenStream create(TokenStream tokenStream) {
+        List<Function<TokenStream, TokenStream>> functions = new ArrayList<>();
+        for (TokenFilterFactory tff : filters) {
+            functions.add(tff::create);
+        }
+        return new RemoveDuplicatesTokenFilter(new MultiplexTokenFilter(tokenStream, functions));
+    }
+
+    @Override
+    public void setReferences(Map<String, TokenFilterFactory> factories) {
+        filters = new ArrayList<>();
+        if (preserveOriginal) {
+            filters.add(IDENTITY_FACTORY);
+        }
+        for (String filter : filterNames) {
+            String[] parts = Strings.tokenizeToStringArray(filter, ",");
+            if (parts.length == 1) {
+                filters.add(resolveFilterFactory(factories, parts[0]));
+            } else {
+                List<TokenFilterFactory> chain = new ArrayList<>();
+                for (String subfilter : parts) {
+                    chain.add(resolveFilterFactory(factories, subfilter));
+                }
+                filters.add(chainFilters(filter, chain));
+            }
+        }
+    }
+
+    private TokenFilterFactory chainFilters(String name, List<TokenFilterFactory> filters) {
+        return new TokenFilterFactory() {
+            @Override
+            public String name() {
+                return name;
+            }
+
+            @Override
+            public TokenStream create(TokenStream tokenStream) {
+                for (TokenFilterFactory tff : filters) {
+                    tokenStream = tff.create(tokenStream);
+                }
+                return tokenStream;
+            }
+        };
+    }
+
+    private TokenFilterFactory resolveFilterFactory(Map<String, TokenFilterFactory> factories, String name) {
+        if (factories.containsKey(name) == false) {
+            throw new IllegalArgumentException("Multiplexing filter [" + name() + "] refers to undefined tokenfilter [" + name + "]");
+        } else {
+            return factories.get(name);
+        }
+    }
+
+    private final class MultiplexTokenFilter extends TokenFilter {
+
+        private final TokenStream source;
+        private final int filterCount;
+
+        private int selector;
+
+        /**
+         * Creates a MultiplexTokenFilter on the given input with a set of filters
+         */
+        MultiplexTokenFilter(TokenStream input, List<Function<TokenStream, TokenStream>> filters) {
+            super(input);
+            TokenStream source = new MultiplexerFilter(input);
+            for (int i = 0; i < filters.size(); i++) {
+                final int slot = i;
+                source = new ConditionalTokenFilter(source, filters.get(i)) {
+                    @Override
+                    protected boolean shouldFilter() {
+                        return slot == selector;
+                    }
+                };
+            }
+            this.source = source;
+            this.filterCount = filters.size();
+            this.selector = filterCount - 1;
+        }
+
+        @Override
+        public boolean incrementToken() throws IOException {
+            return source.incrementToken();
+        }
+
+        @Override
+        public void end() throws IOException {
+            source.end();
+        }
+
+        @Override
+        public void reset() throws IOException {
+            source.reset();
+        }
+
+        private final class MultiplexerFilter extends TokenFilter {
+
+            State state;
+            PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class);
+
+            private MultiplexerFilter(TokenStream input) {
+                super(input);
+            }
+
+            @Override
+            public boolean incrementToken() throws IOException {
+                if (selector >= filterCount - 1) {
+                    selector = 0;
+                    if (input.incrementToken() == false) {
+                        return false;
+                    }
+                    state = captureState();
+                    return true;
+                }
+                restoreState(state);
+                posIncAtt.setPositionIncrement(0);
+                selector++;
+                return true;
+            }
+
+            @Override
+            public void reset() throws IOException {
+                super.reset();
+                selector = filterCount - 1;
+                this.state = null;
+            }
+        }
+
+    }
+}
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java
new file mode 100644
index 0000000000000..c39fa05c26f72
--- /dev/null
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterTests.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.elasticsearch.analysis.common; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; + +import java.io.IOException; +import java.util.Collections; + +public class MultiplexerTokenFilterTests extends ESTokenStreamTestCase { + + public void testMultiplexingFilter() throws IOException { + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("index.analysis.filter.t.type", "truncate") + .put("index.analysis.filter.t.length", "2") + .put("index.analysis.filter.multiplexFilter.type", "multiplexer") + .putList("index.analysis.filter.multiplexFilter.filters", "lowercase, t", "uppercase") + .put("index.analysis.analyzer.myAnalyzer.type", "custom") + .put("index.analysis.analyzer.myAnalyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.myAnalyzer.filter", "multiplexFilter") + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); + + IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), + Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings); + + try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) { + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "ONe tHree", new String[]{ + "ONe", "on", "ONE", "tHree", "th", "THREE" + }, new int[]{ + 1, 0, 0, 1, 0, 0 + }); + // Duplicates are removed + assertAnalyzesTo(analyzer, "ONe THREE", new String[]{ + "ONe", "on", "ONE", "THREE", "th" + }, new int[]{ + 1, 0, 0, 1, 0, 0 + }); + } + } + + public void testMultiplexingNoOriginal() throws IOException { + + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("index.analysis.filter.t.type", "truncate") + .put("index.analysis.filter.t.length", "2") + .put("index.analysis.filter.multiplexFilter.type", "multiplexer") + .put("index.analysis.filter.multiplexFilter.preserve_original", "false") + .putList("index.analysis.filter.multiplexFilter.filters", "lowercase, t", "uppercase") + .put("index.analysis.analyzer.myAnalyzer.type", "custom") + .put("index.analysis.analyzer.myAnalyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.myAnalyzer.filter", "multiplexFilter") + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); + + IndexAnalyzers indexAnalyzers = new AnalysisModule(TestEnvironment.newEnvironment(settings), + Collections.singletonList(new CommonAnalysisPlugin())).getAnalysisRegistry().build(idxSettings); + + try (NamedAnalyzer analyzer = indexAnalyzers.get("myAnalyzer")) { + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "ONe tHree", new String[]{ + "on", "ONE", "th", "THREE" + }, new int[]{ + 1, 
0, 1, 0, + }); + } + + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 61b5cb9171244..c61a7cf070680 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -166,7 +166,18 @@ public Map buildTokenFilterFactories(IndexSettings i */ tokenFilters.put("synonym", requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings))); tokenFilters.put("synonym_graph", requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings))); - return buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.preConfiguredTokenFilters); + + Map mappings + = buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.preConfiguredTokenFilters); + + // ReferringTokenFilters require references to other tokenfilters, so we pass these in + // after all factories have been registered + for (TokenFilterFactory tff : mappings.values()) { + if (tff instanceof ReferringFilterFactory) { + ((ReferringFilterFactory)tff).setReferences(mappings); + } + } + return mappings; } public Map buildTokenizerFactories(IndexSettings indexSettings) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ReferringFilterFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/ReferringFilterFactory.java new file mode 100644 index 0000000000000..9eb9bc2dbd653 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/analysis/ReferringFilterFactory.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import java.util.Map; + +/** + * Marks a {@link TokenFilterFactory} that refers to other filter factories. 
+ * + * The analysis registry will call {@link #setReferences(Map)} with a map of all + * available TokenFilterFactories after all factories have been registered + */ +public interface ReferringFilterFactory { + + /** + * Called with a map of all registered filter factories + */ + void setReferences(Map factories); + +} diff --git a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java index 36da9761b978d..26a5b87866c21 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.lucene.analysis.MockTokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.en.EnglishAnalyzer; From e7a7b9689d8a45bbee71a9ccc5d72b9e91ed56e1 Mon Sep 17 00:00:00 2001 From: Peter Dyson Date: Wed, 20 Jun 2018 21:04:03 +1000 Subject: [PATCH 43/92] =?UTF-8?q?[Docs]=C2=A0Mention=20ip=5Frange=20dataty?= =?UTF-8?q?pes=20on=20ip=20type=20page=20(#31416)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit A link to the ip_range datatype page provides a way for newer users to know it exists if they land directly on the ip datatype page first via a search. --- docs/reference/mapping/types/ip.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/mapping/types/ip.asciidoc b/docs/reference/mapping/types/ip.asciidoc index 512b0d725457e..695cd1c626bc3 100644 --- a/docs/reference/mapping/types/ip.asciidoc +++ b/docs/reference/mapping/types/ip.asciidoc @@ -36,6 +36,8 @@ GET my_index/_search // CONSOLE // TESTSETUP +NOTE: You can also store ip ranges in a single field using an <>. + [[ip-params]] ==== Parameters for `ip` fields From 2aefb72891064e8d4d7c8f440663dc201369c828 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 20 Jun 2018 13:12:56 +0200 Subject: [PATCH 44/92] Choose JVM options ergonomically With this commit we add the possibility to define further JVM options (and system properties) based on the current environment. As a proof of concept, it chooses Netty's allocator ergonomically based on the maximum defined heap size. We switch to the unpooled allocator at 1GB heap size (value determined experimentally, see #30684 for more details). We are also explicit about the choice of the allocator in either case. Relates #30684 --- .../tools/launchers/JvmErgonomics.java | 108 ++++++++++++++++++ .../tools/launchers/JvmOptionsParser.java | 2 + .../tools/launchers/JvmErgonomicsTests.java | 83 ++++++++++++++ 3 files changed, 193 insertions(+) create mode 100644 distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java create mode 100644 distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java new file mode 100644 index 0000000000000..761cd9e1be5db --- /dev/null +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.tools.launchers;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Tunes Elasticsearch JVM settings based on inspection of provided JVM options.
+ */
+final class JvmErgonomics {
+    private static final long KB = 1024L;
+
+    private static final long MB = 1024L * 1024L;
+
+    private static final long GB = 1024L * 1024L * 1024L;
+
+
+    private JvmErgonomics() {
+        throw new AssertionError("No instances intended");
+    }
+
+    /**
+     * Chooses additional JVM options for Elasticsearch.
+     *
+     * @param userDefinedJvmOptions A list of JVM options that have been defined by the user.
+     * @return A list of additional JVM options to set.
+     */
+    static List<String> choose(List<String> userDefinedJvmOptions) {
+        List<String> ergonomicChoices = new ArrayList<>();
+        Long heapSize = extractHeapSize(userDefinedJvmOptions);
+        Map<String, String> systemProperties = extractSystemProperties(userDefinedJvmOptions);
+        if (heapSize != null) {
+            if (systemProperties.containsKey("io.netty.allocator.type") == false) {
+                if (heapSize <= 1 * GB) {
+                    ergonomicChoices.add("-Dio.netty.allocator.type=unpooled");
+                } else {
+                    ergonomicChoices.add("-Dio.netty.allocator.type=pooled");
+                }
+            }
+        }
+        return ergonomicChoices;
+    }
+
+    private static final Pattern MAX_HEAP_SIZE = Pattern.compile("^(-Xmx|-XX:MaxHeapSize=)(?<size>\\d+)(?<unit>\\w)?$");
+
+    // package private for testing
+    static Long extractHeapSize(List<String> userDefinedJvmOptions) {
+        for (String jvmOption : userDefinedJvmOptions) {
+            final Matcher matcher = MAX_HEAP_SIZE.matcher(jvmOption);
+            if (matcher.matches()) {
+                final long size = Long.parseLong(matcher.group("size"));
+                final String unit = matcher.group("unit");
+                if (unit == null) {
+                    return size;
+                } else {
+                    switch (unit.toLowerCase(Locale.ROOT)) {
+                        case "k":
+                            return size * KB;
+                        case "m":
+                            return size * MB;
+                        case "g":
+                            return size * GB;
+                        default:
+                            throw new IllegalArgumentException("Unknown unit [" + unit + "] for max heap size in [" + jvmOption + "]");
+                    }
+                }
+            }
+        }
+        return null;
+    }
+
+    private static final Pattern SYSTEM_PROPERTY = Pattern.compile("^-D(?<key>[\\w+].*?)=(?<value>.*)$");
+
+    // package private for testing
+    static Map<String, String> extractSystemProperties(List<String> userDefinedJvmOptions) {
+        Map<String, String> systemProperties = new HashMap<>();
+        for (String jvmOption : userDefinedJvmOptions) {
+            final Matcher matcher = SYSTEM_PROPERTY.matcher(jvmOption);
+            if (matcher.matches()) {
+                systemProperties.put(matcher.group("key"), matcher.group("value"));
+            }
+        }
+        return systemProperties;
+    }
+}
diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java
index 7f612132d8c59..c19c12cfe4444 100644
--- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java
+++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java
@@ -78,6 +78,8 @@ public void accept(final int lineNumber, final String line) {
             }
 
             if (invalidLines.isEmpty()) {
+                List<String> ergonomicJvmOptions = JvmErgonomics.choose(jvmOptions);
+                jvmOptions.addAll(ergonomicJvmOptions);
                 final String spaceDelimitedJvmOptions = spaceDelimitJvmOptions(jvmOptions);
                 Launchers.outPrintln(spaceDelimitedJvmOptions);
                 Launchers.exit(0);
diff --git a/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java b/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java
new file mode 100644
index 0000000000000..4b075d78b70a8
--- /dev/null
+++ b/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.tools.launchers;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+public class JvmErgonomicsTests extends LaunchersTestCase {
+    public void testExtractValidHeapSize() {
+        assertEquals(Long.valueOf(1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx1024")));
+        assertEquals(Long.valueOf(2L * 1024 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx2g")));
+        assertEquals(Long.valueOf(32 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx32M")));
+        assertEquals(Long.valueOf(32 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-XX:MaxHeapSize=32M")));
+    }
+
+    public void testExtractInvalidHeapSize() {
+        try {
+            JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx2T"));
+            fail("Expected IllegalArgumentException to be raised");
+        } catch (IllegalArgumentException expected) {
+            assertEquals("Unknown unit [T] for max heap size in [-Xmx2T]", expected.getMessage());
+        }
+    }
+
+    public void testExtractNoHeapSize() {
+        assertNull("No spaces allowed", JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx 1024")));
+        assertNull("JVM option is not present", JvmErgonomics.extractHeapSize(Collections.singletonList("")));
+        assertNull("Multiple JVM options per line", JvmErgonomics.extractHeapSize(Collections.singletonList("-Xms2g -Xmx2g")));
+    }
+
+    public void testExtractSystemProperties() {
+        Map<String, String> expectedSystemProperties = new HashMap<>();
+        expectedSystemProperties.put("file.encoding", "UTF-8");
+        expectedSystemProperties.put("kv.setting", "ABC=DEF");
+
+        Map<String, String> parsedSystemProperties = JvmErgonomics.extractSystemProperties(
+            Arrays.asList("-Dfile.encoding=UTF-8", "-Dkv.setting=ABC=DEF"));
+
+        assertEquals(expectedSystemProperties, parsedSystemProperties);
+    }
+
+    public void testExtractNoSystemProperties() {
+        Map<String, String> parsedSystemProperties = JvmErgonomics.extractSystemProperties(Arrays.asList("-Xms1024M", "-Xmx1024M"));
+        assertTrue(parsedSystemProperties.isEmpty());
+    }
+
+    public void testLittleMemoryErgonomicChoices() {
+        String smallHeap = randomFrom(Arrays.asList("64M", "512M", "1024M", "1G"));
+        List<String> expectedChoices = Collections.singletonList("-Dio.netty.allocator.type=unpooled");
+        assertEquals(expectedChoices, JvmErgonomics.choose(Arrays.asList("-Xms" + smallHeap, "-Xmx" + smallHeap)));
+    }
+
+    public void testPlentyMemoryErgonomicChoices() {
+        String largeHeap = randomFrom(Arrays.asList("1025M", "2048M", "2G", "8G"));
+        List<String> expectedChoices = Collections.singletonList("-Dio.netty.allocator.type=pooled");
+        assertEquals(expectedChoices, JvmErgonomics.choose(Arrays.asList("-Xms" + largeHeap, "-Xmx" + largeHeap)));
+    }
+}
From 297e99c4c2bb313e45f320d3ccd68498ddc2c4fd Mon Sep 17 00:00:00 2001
From: Jonathan Pool
Date: Wed, 20 Jun 2018 08:18:55 -0400
Subject: [PATCH 45/92] [Docs] Extend Homebrew installation instructions (#28902)

Adding a note about proceeding after a successful homebrew installation.
--- docs/reference/getting-started.asciidoc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index d684be80c00b8..39006d1ab53f8 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -158,6 +158,9 @@ On macOS, Elasticsearch can also be installed via https://brew.sh[Homebrew]: brew install elasticsearch -------------------------------------------------- +If installation succeeds, Homebrew will finish by saying that you can start Elasticsearch by entering +`elasticsearch`. Do that now. The expected response is described below, under <>. + [float] === Installation example with MSI Windows Installer @@ -216,6 +219,7 @@ And now we are ready to start our node and single cluster: -------------------------------------------------- [float] +[[successfully-running-node]] === Successfully running node If everything goes well with installation, you should see a bunch of messages that look like below: From db1b97fd853462b7b5cc44511236de1b9073e494 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 20 Jun 2018 10:34:08 -0400 Subject: [PATCH 46/92] Remove QueryCachingPolicy#ALWAYS_CACHE (#31451) The QueryCachingPolicy#ALWAYS_CACHE was deprecated in Lucene-7.4 and will be removed in Lucene-8.0. This change replaces it with QueryCachingPolicy. This also makes INDEX_QUERY_CACHE_EVERYTHING_SETTING visible in testing only. --- .../common/settings/IndexScopedSettings.java | 1 - .../elasticsearch/index/shard/IndexShard.java | 12 +++++++++- .../indices/IndicesQueryCacheTests.java | 23 +++++++++++++++---- .../scriptfilter/ScriptQuerySearchIT.java | 4 +++- .../elasticsearch/test/ESIntegTestCase.java | 4 ---- .../test/InternalSettingsPlugin.java | 5 +++- .../test/SecuritySettingsSource.java | 3 ++- 7 files changed, 38 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index bd6bba7b784cd..6f45a5d462bd8 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -151,7 +151,6 @@ public final class IndexScopedSettings extends AbstractScopedSettings { IndexModule.INDEX_STORE_TYPE_SETTING, IndexModule.INDEX_STORE_PRE_LOAD_SETTING, IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING, - IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING, FsDirectoryService.INDEX_LOCK_FACTOR_SETTING, EngineConfig.INDEX_CODEC_SETTING, EngineConfig.INDEX_OPTIMIZE_AUTO_GENERATED_IDS, diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 34230be14cb7e..fb987fe035afa 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -29,6 +29,7 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.Term; +import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.Sort; @@ -299,7 +300,16 @@ public IndexShard( // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis if 
(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.get(settings)) { - cachingPolicy = QueryCachingPolicy.ALWAYS_CACHE; + cachingPolicy = new QueryCachingPolicy() { + @Override + public void onUse(Query query) { + + } + @Override + public boolean shouldCache(Query query) { + return true; + } + }; } else { cachingPolicy = new UsageTrackingQueryCachingPolicy(); } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index 83bde66e3bd21..e155639f143c6 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -89,6 +89,19 @@ public boolean isCacheable(LeafReaderContext ctx) { } + private static QueryCachingPolicy alwaysCachePolicy() { + return new QueryCachingPolicy() { + @Override + public void onUse(Query query) { + + } + @Override + public boolean shouldCache(Query query) { + return true; + } + }; + } + public void testBasics() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()); @@ -98,7 +111,7 @@ public void testBasics() throws IOException { ShardId shard = new ShardId("index", "_na_", 0); r = ElasticsearchDirectoryReader.wrap(r, shard); IndexSearcher s = new IndexSearcher(r); - s.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + s.setQueryCachingPolicy(alwaysCachePolicy()); Settings settings = Settings.builder() .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10) @@ -169,7 +182,7 @@ public void testTwoShards() throws IOException { ShardId shard1 = new ShardId("index", "_na_", 0); r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); IndexSearcher s1 = new IndexSearcher(r1); - s1.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + s1.setQueryCachingPolicy(alwaysCachePolicy()); Directory dir2 = newDirectory(); IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig()); @@ -179,7 +192,7 @@ public void testTwoShards() throws IOException { ShardId shard2 = new ShardId("index", "_na_", 1); r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); IndexSearcher s2 = new IndexSearcher(r2); - s2.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + s2.setQueryCachingPolicy(alwaysCachePolicy()); Settings settings = Settings.builder() .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10) @@ -295,7 +308,7 @@ public void testStatsOnEviction() throws IOException { ShardId shard1 = new ShardId("index", "_na_", 0); r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); IndexSearcher s1 = new IndexSearcher(r1); - s1.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + s1.setQueryCachingPolicy(alwaysCachePolicy()); Directory dir2 = newDirectory(); IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig()); @@ -305,7 +318,7 @@ public void testStatsOnEviction() throws IOException { ShardId shard2 = new ShardId("index", "_na_", 1); r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); IndexSearcher s2 = new IndexSearcher(r2); - s2.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); + s2.setQueryCachingPolicy(alwaysCachePolicy()); Settings settings = Settings.builder() .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10) diff --git a/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index 45e374b8697a2..16a9d99b78341 100644 --- 
a/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -31,8 +31,10 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; +import java.util.Arrays; import java.util.Base64; import java.util.Collection; import java.util.Collections; @@ -52,7 +54,7 @@ public class ScriptQuerySearchIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(CustomScriptPlugin.class); + return Arrays.asList(CustomScriptPlugin.class, InternalSettingsPlugin.class); } public static class CustomScriptPlugin extends MockScriptPlugin { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 979bfccdb64b8..c63a1c9c6e68f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -434,10 +434,6 @@ public void randomIndexTemplate() throws IOException { if (randomBoolean()) { randomSettingsBuilder.put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), randomBoolean()); } - - if (randomBoolean()) { - randomSettingsBuilder.put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), randomBoolean()); - } PutIndexTemplateRequestBuilder putTemplate = client().admin().indices() .preparePutTemplate("random_index_template") .setPatterns(Collections.singletonList("*")) diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java index e1c555b811064..be8c824f0f790 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; @@ -51,6 +52,8 @@ public List> getSettings() { INDEX_CREATION_DATE_SETTING, PROVIDED_NAME_SETTING, TRANSLOG_RETENTION_CHECK_INTERVAL_SETTING, - IndexService.GLOBAL_CHECKPOINT_SYNC_INTERVAL_SETTING); + IndexService.GLOBAL_CHECKPOINT_SYNC_INTERVAL_SETTING, + IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING + ); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index 1690ab652c067..8ad1c61029a97 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -169,7 +169,8 @@ protected void addDefaultSecurityTransportType(Settings.Builder builder, Setting @Override public Collection> nodePlugins() { - return Arrays.asList(LocalStateSecurity.class, Netty4Plugin.class, ReindexPlugin.class, CommonAnalysisPlugin.class); + return Arrays.asList(LocalStateSecurity.class, Netty4Plugin.class, ReindexPlugin.class, CommonAnalysisPlugin.class, + 
InternalSettingsPlugin.class); } @Override From 90d62e6e4d8b331a25ab1a99e4addc9b4ef9cbc3 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 20 Jun 2018 16:42:50 +0200 Subject: [PATCH 47/92] Add Delete Snapshot High Level REST API With this commit we add the delete snapshot API to the Java high level REST client. Relates #27205 Relates #31393 --- .../client/RequestConverters.java | 13 ++++ .../elasticsearch/client/SnapshotClient.java | 32 ++++++++ .../client/RequestConvertersTests.java | 20 +++++ .../org/elasticsearch/client/SnapshotIT.java | 27 +++++++ .../SnapshotClientDocumentationIT.java | 73 +++++++++++++++++++ .../snapshot/delete_snapshot.asciidoc | 73 +++++++++++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../delete/DeleteSnapshotResponse.java | 6 ++ .../delete/DeleteSnapshotResponseTests.java | 41 +++++++++++ 9 files changed, 287 insertions(+) create mode 100644 docs/java-rest/high-level/snapshot/delete_snapshot.asciidoc create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 9d4582494eb91..cd67bc8e48325 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -39,6 +39,7 @@ import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; @@ -844,6 +845,18 @@ static Request verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest) return request; } + static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) { + String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") + .addPathPart(deleteSnapshotRequest.repository()) + .addPathPart(deleteSnapshotRequest.snapshot()) + .build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + + Params parameters = new Params(request); + parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout()); + return request; + } + static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException { String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addPathPart(putIndexTemplateRequest.name()).build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index b7cd2d52732cc..36b4f473ce82f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -28,6 +28,8 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import 
org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import java.io.IOException; @@ -161,4 +163,34 @@ public void verifyRepositoryAsync(VerifyRepositoryRequest verifyRepositoryReques restHighLevelClient.performRequestAsyncAndParseEntity(verifyRepositoryRequest, RequestConverters::verifyRepository, options, VerifyRepositoryResponse::fromXContent, listener, emptySet()); } + + /** + * Deletes a snapshot. + * See Snapshot and Restore + * API on elastic.co + * + * @param deleteSnapshotRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public DeleteSnapshotResponse delete(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options, + DeleteSnapshotResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously deletes a snapshot. + * See Snapshot and Restore + * API on elastic.co + * + * @param deleteSnapshotRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void deleteAsync(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options, + DeleteSnapshotResponse::fromXContent, listener, emptySet()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index e7d56a4332b82..eee37cea561b0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.Alias; @@ -1857,6 +1858,25 @@ public void testVerifyRepository() { assertThat(expectedParams, equalTo(request.getParameters())); } + public void testDeleteSnapshot() { + Map expectedParams = new HashMap<>(); + String repository = randomIndicesNames(1, 1)[0]; + String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); + + String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s", repository, snapshot); + + DeleteSnapshotRequest deleteSnapshotRequest = new 
DeleteSnapshotRequest(); + deleteSnapshotRequest.repository(repository); + deleteSnapshotRequest.snapshot(snapshot); + setRandomMasterTimeout(deleteSnapshotRequest, expectedParams); + + Request request = RequestConverters.deleteSnapshot(deleteSnapshotRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpDelete.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertNull(request.getEntity()); + } + public void testPutTemplateRequest() throws Exception { Map names = new HashMap<>(); names.put("log", "log"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index 3b27c2631f4d3..f4d325e158bc5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -28,11 +28,14 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.Locale; import static org.hamcrest.Matchers.equalTo; @@ -46,6 +49,13 @@ private PutRepositoryResponse createTestRepository(String repository, String typ highLevelClient().snapshot()::createRepositoryAsync); } + private Response createTestSnapshot(String repository, String snapshot) throws IOException { + Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repository, snapshot)); + createSnapshot.addParameter("wait_for_completion", "true"); + return highLevelClient().getLowLevelClient().performRequest(createSnapshot); + } + + public void testCreateRepository() throws IOException { PutRepositoryResponse response = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}"); assertTrue(response.isAcknowledged()); @@ -108,4 +118,21 @@ public void testVerifyRepository() throws IOException { highLevelClient().snapshot()::verifyRepositoryAsync); assertThat(response.getNodes().size(), equalTo(1)); } + + public void testDeleteSnapshot() throws IOException { + String repository = "test_repository"; + String snapshot = "test_snapshot"; + + PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}"); + assertTrue(putRepositoryResponse.isAcknowledged()); + + Response putSnapshotResponse = createTestSnapshot(repository, snapshot); + // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. 
+ assertEquals(200, putSnapshotResponse.getStatusLine().getStatusCode()); + + DeleteSnapshotRequest request = new DeleteSnapshotRequest(repository, snapshot); + DeleteSnapshotResponse response = execute(request, highLevelClient().snapshot()::delete, highLevelClient().snapshot()::deleteAsync); + + assertTrue(response.isAcknowledged()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index 8c158a91a5111..965f9641e48ad 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -29,8 +29,12 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; @@ -41,6 +45,7 @@ import java.io.IOException; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -69,6 +74,8 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase private static final String repositoryName = "test_repository"; + private static final String snapshotName = "test_snapshot"; + public void testSnapshotCreateRepository() throws IOException { RestHighLevelClient client = highLevelClient(); @@ -360,10 +367,76 @@ public void onFailure(Exception e) { } } + public void testSnapshotDeleteSnapshot() throws IOException { + RestHighLevelClient client = highLevelClient(); + + createTestRepositories(); + createTestSnapshots(); + + // tag::delete-snapshot-request + DeleteSnapshotRequest request = new DeleteSnapshotRequest(repositoryName); + request.snapshot(snapshotName); + // end::delete-snapshot-request + + // tag::delete-snapshot-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::delete-snapshot-request-masterTimeout + + // tag::delete-snapshot-execute + DeleteSnapshotResponse response = client.snapshot().delete(request, RequestOptions.DEFAULT); + // end::delete-snapshot-execute + + // tag::delete-snapshot-response + boolean acknowledged = response.isAcknowledged(); // <1> + // end::delete-snapshot-response + assertTrue(acknowledged); + } + + public void testSnapshotDeleteSnapshotAsync() throws InterruptedException { + RestHighLevelClient client = highLevelClient(); + { + DeleteSnapshotRequest request = new DeleteSnapshotRequest(); + + // tag::delete-snapshot-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(DeleteSnapshotResponse 
deleteSnapshotResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::delete-snapshot-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::delete-snapshot-execute-async + client.snapshot().deleteAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::delete-snapshot-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + private void createTestRepositories() throws IOException { PutRepositoryRequest request = new PutRepositoryRequest(repositoryName); request.type(FsRepository.TYPE); request.settings("{\"location\": \".\"}", XContentType.JSON); assertTrue(highLevelClient().snapshot().createRepository(request, RequestOptions.DEFAULT).isAcknowledged()); } + + private void createTestSnapshots() throws IOException { + Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repositoryName, snapshotName)); + createSnapshot.addParameter("wait_for_completion", "true"); + Response response = highLevelClient().getLowLevelClient().performRequest(createSnapshot); + // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. + assertEquals(200, response.getStatusLine().getStatusCode()); + } } diff --git a/docs/java-rest/high-level/snapshot/delete_snapshot.asciidoc b/docs/java-rest/high-level/snapshot/delete_snapshot.asciidoc new file mode 100644 index 0000000000000..a594db5b60259 --- /dev/null +++ b/docs/java-rest/high-level/snapshot/delete_snapshot.asciidoc @@ -0,0 +1,73 @@ +[[java-rest-high-snapshot-delete-snapshot]] +=== Delete Snapshot API + +The Delete Snapshot API allows to delete a snapshot. 
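+
+At a glance, the full flow looks like the following minimal sketch (the repository and
+snapshot names used here are placeholders, and `client` is assumed to be an
+already-initialized `RestHighLevelClient`); the sections below show each step in detail:
+
+[source,java]
+--------------------------------------------------
+// build a request for the snapshot "my_snapshot" in the repository "my_repository"
+DeleteSnapshotRequest request = new DeleteSnapshotRequest("my_repository");
+request.snapshot("my_snapshot");
+
+// execute it synchronously and check that the master acknowledged the deletion
+DeleteSnapshotResponse response = client.snapshot().delete(request, RequestOptions.DEFAULT);
+boolean acknowledged = response.isAcknowledged();
+--------------------------------------------------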
+ +[[java-rest-high-snapshot-delete-snapshot-request]] +==== Delete Snapshot Request + +A `DeleteSnapshotRequest`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-request] +-------------------------------------------------- + +==== Optional Arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue` +<2> Timeout to connect to the master node as a `String` + +[[java-rest-high-snapshot-delete-snapshot-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-execute] +-------------------------------------------------- + +[[java-rest-high-snapshot-delete-snapshot-async]] +==== Asynchronous Execution + +The asynchronous execution of a delete snapshot request requires both the +`DeleteSnapshotRequest` instance and an `ActionListener` instance to be +passed to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-execute-async] +-------------------------------------------------- +<1> The `DeleteSnapshotRequest` to execute and the `ActionListener` +to use when the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `DeleteSnapshotResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of a failure. 
The raised exception is provided as an argument + +[[java-rest-high-cluster-delete-snapshot-response]] +==== Delete Snapshot Response + +The returned `DeleteSnapshotResponse` allows to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[delete-snapshot-response] +-------------------------------------------------- +<1> Indicates the node has acknowledged the request diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 17acc8f13c04d..727088aa5737f 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -136,11 +136,13 @@ The Java High Level REST Client supports the following Snapshot APIs: * <> * <> * <> +* <> include::snapshot/get_repository.asciidoc[] include::snapshot/create_repository.asciidoc[] include::snapshot/delete_repository.asciidoc[] include::snapshot/verify_repository.asciidoc[] +include::snapshot/delete_snapshot.asciidoc[] == Tasks APIs diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponse.java index d8de78c3e5b76..ac6e74d39702c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponse.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.delete; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.xcontent.XContentParser; /** * Delete snapshot response @@ -32,4 +33,9 @@ public class DeleteSnapshotResponse extends AcknowledgedResponse { DeleteSnapshotResponse(boolean acknowledged) { super(acknowledged); } + + public static DeleteSnapshotResponse fromXContent(XContentParser parser) { + return new DeleteSnapshotResponse(parseAcknowledged(parser)); + } + } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponseTests.java new file mode 100644 index 0000000000000..d77dadfb81edb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotResponseTests.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.cluster.snapshots.delete; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +public class DeleteSnapshotResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected DeleteSnapshotResponse doParseInstance(XContentParser parser) { + return DeleteSnapshotResponse.fromXContent(parser); + } + + @Override + protected DeleteSnapshotResponse createBlankInstance() { + return new DeleteSnapshotResponse(); + } + + @Override + protected DeleteSnapshotResponse createTestInstance() { + return new DeleteSnapshotResponse(randomBoolean()); + } +} From f012de0f00d3a6aacf51b122f3c7a0f87010a5f8 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 20 Jun 2018 08:17:11 -0700 Subject: [PATCH 48/92] [DOCS] Move licensing APIs to docs (#31445) --- .../licensing}/delete-license.asciidoc | 2 ++ .../licensing}/get-basic-status.asciidoc | 1 + .../reference/licensing}/get-license.asciidoc | 9 +++----- .../licensing}/get-trial-status.asciidoc | 1 + docs/reference/licensing/index.asciidoc | 22 +++++++++++++++++++ .../reference/licensing}/start-basic.asciidoc | 1 + .../reference/licensing}/start-trial.asciidoc | 1 + .../licensing}/update-license.asciidoc | 2 ++ docs/reference/rest-api/index.asciidoc | 2 +- x-pack/docs/build.gradle | 2 -- x-pack/docs/en/rest-api/licensing.asciidoc | 22 ------------------- 11 files changed, 34 insertions(+), 31 deletions(-) rename {x-pack/docs/en/rest-api/license => docs/reference/licensing}/delete-license.asciidoc (97%) rename {x-pack/docs/en/rest-api/license => docs/reference/licensing}/get-basic-status.asciidoc (98%) rename {x-pack/docs/en/rest-api/license => docs/reference/licensing}/get-license.asciidoc (85%) rename {x-pack/docs/en/rest-api/license => docs/reference/licensing}/get-trial-status.asciidoc (98%) create mode 100644 docs/reference/licensing/index.asciidoc rename {x-pack/docs/en/rest-api/license => docs/reference/licensing}/start-basic.asciidoc (99%) rename {x-pack/docs/en/rest-api/license => docs/reference/licensing}/start-trial.asciidoc (98%) rename {x-pack/docs/en/rest-api/license => docs/reference/licensing}/update-license.asciidoc (99%) delete mode 100644 x-pack/docs/en/rest-api/licensing.asciidoc diff --git a/x-pack/docs/en/rest-api/license/delete-license.asciidoc b/docs/reference/licensing/delete-license.asciidoc similarity index 97% rename from x-pack/docs/en/rest-api/license/delete-license.asciidoc rename to docs/reference/licensing/delete-license.asciidoc index 24662664daa40..b02406045a989 100644 --- a/x-pack/docs/en/rest-api/license/delete-license.asciidoc +++ b/docs/reference/licensing/delete-license.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[delete-license]] === Delete License API @@ -41,3 +42,4 @@ When the license is successfully deleted, the API returns the following response "acknowledged": true } ------------------------------------------------------------ +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/license/get-basic-status.asciidoc b/docs/reference/licensing/get-basic-status.asciidoc similarity index 98% rename from x-pack/docs/en/rest-api/license/get-basic-status.asciidoc rename to docs/reference/licensing/get-basic-status.asciidoc index c6c6385447ab3..a9cc9cf67add6 100644 --- a/x-pack/docs/en/rest-api/license/get-basic-status.asciidoc +++ b/docs/reference/licensing/get-basic-status.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] 
[[get-basic-status]] === Get Basic Status API diff --git a/x-pack/docs/en/rest-api/license/get-license.asciidoc b/docs/reference/licensing/get-license.asciidoc similarity index 85% rename from x-pack/docs/en/rest-api/license/get-license.asciidoc rename to docs/reference/licensing/get-license.asciidoc index cba6e71057661..bf094d99f2f5a 100644 --- a/x-pack/docs/en/rest-api/license/get-license.asciidoc +++ b/docs/reference/licensing/get-license.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[get-license]] === Get License API @@ -52,11 +53,9 @@ GET _xpack/license "license" : { "status" : "active", "uid" : "cbff45e7-c553-41f7-ae4f-9205eabd80xx", - "type" : "trial", + "type" : "basic", "issue_date" : "2018-02-22T23:12:05.550Z", "issue_date_in_millis" : 1519341125550, - "expiry_date" : "2018-03-24T23:12:05.550Z", - "expiry_date_in_millis" : 1521933125550, "max_nodes" : 1000, "issued_to" : "test", "issuer" : "elasticsearch", @@ -65,11 +64,9 @@ GET _xpack/license } -------------------------------------------------- // TESTRESPONSE[s/"cbff45e7-c553-41f7-ae4f-9205eabd80xx"/$body.license.uid/] -// TESTRESPONSE[s/"trial"/$body.license.type/] +// TESTRESPONSE[s/"basic"/$body.license.type/] // TESTRESPONSE[s/"2018-02-22T23:12:05.550Z"/$body.license.issue_date/] // TESTRESPONSE[s/1519341125550/$body.license.issue_date_in_millis/] -// TESTRESPONSE[s/"2018-03-24T23:12:05.550Z"/$body.license.expiry_date/] -// TESTRESPONSE[s/1521933125550/$body.license.expiry_date_in_millis/] // TESTRESPONSE[s/1000/$body.license.max_nodes/] // TESTRESPONSE[s/"test"/$body.license.issued_to/] // TESTRESPONSE[s/"elasticsearch"/$body.license.issuer/] diff --git a/x-pack/docs/en/rest-api/license/get-trial-status.asciidoc b/docs/reference/licensing/get-trial-status.asciidoc similarity index 98% rename from x-pack/docs/en/rest-api/license/get-trial-status.asciidoc rename to docs/reference/licensing/get-trial-status.asciidoc index b2cc1ce1b6c88..ec47782a3d2ee 100644 --- a/x-pack/docs/en/rest-api/license/get-trial-status.asciidoc +++ b/docs/reference/licensing/get-trial-status.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[get-trial-status]] === Get Trial Status API diff --git a/docs/reference/licensing/index.asciidoc b/docs/reference/licensing/index.asciidoc new file mode 100644 index 0000000000000..a1dfd398acfe7 --- /dev/null +++ b/docs/reference/licensing/index.asciidoc @@ -0,0 +1,22 @@ +[role="xpack"] +[[licensing-apis]] +== Licensing APIs + +You can use the following APIs to manage your licenses: + +* <> +* <> +* <> +* <> +* <> +* <> +* <> + + +include::delete-license.asciidoc[] +include::get-license.asciidoc[] +include::get-trial-status.asciidoc[] +include::start-trial.asciidoc[] +include::get-basic-status.asciidoc[] +include::start-basic.asciidoc[] +include::update-license.asciidoc[] diff --git a/x-pack/docs/en/rest-api/license/start-basic.asciidoc b/docs/reference/licensing/start-basic.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/license/start-basic.asciidoc rename to docs/reference/licensing/start-basic.asciidoc index 820b2b5eab64a..3206dc0801f36 100644 --- a/x-pack/docs/en/rest-api/license/start-basic.asciidoc +++ b/docs/reference/licensing/start-basic.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[start-basic]] === Start Basic API diff --git a/x-pack/docs/en/rest-api/license/start-trial.asciidoc b/docs/reference/licensing/start-trial.asciidoc similarity index 98% rename from x-pack/docs/en/rest-api/license/start-trial.asciidoc rename to 
docs/reference/licensing/start-trial.asciidoc index 341c72853fd08..ba1cc0d786693 100644 --- a/x-pack/docs/en/rest-api/license/start-trial.asciidoc +++ b/docs/reference/licensing/start-trial.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[start-trial]] === Start Trial API diff --git a/x-pack/docs/en/rest-api/license/update-license.asciidoc b/docs/reference/licensing/update-license.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/license/update-license.asciidoc rename to docs/reference/licensing/update-license.asciidoc index 54c5539840772..b340cf3ed6ee5 100644 --- a/x-pack/docs/en/rest-api/license/update-license.asciidoc +++ b/docs/reference/licensing/update-license.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[update-license]] === Update License API @@ -123,6 +124,7 @@ receive the following response: } } ------------------------------------------------------------ +// NOTCONSOLE To complete the update, you must re-submit the API request and set the `acknowledge` parameter to `true`. For example: diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index b9d3c9db60a6f..e44eea9aa53f4 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -20,7 +20,7 @@ directly to configure and access {xpack} features. include::info.asciidoc[] include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[] -include::{xes-repo-dir}/rest-api/licensing.asciidoc[] +include::{es-repo-dir}/licensing/index.asciidoc[] include::{xes-repo-dir}/rest-api/migration.asciidoc[] include::{xes-repo-dir}/rest-api/ml-api.asciidoc[] include::{xes-repo-dir}/rest-api/rollup-api.asciidoc[] diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index e97faf12a6cd0..9abca910c5dfc 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -47,8 +47,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/watcher/trigger/schedule/weekly.asciidoc', 'en/watcher/trigger/schedule/yearly.asciidoc', 'en/watcher/troubleshooting.asciidoc', - 'en/rest-api/license/delete-license.asciidoc', - 'en/rest-api/license/update-license.asciidoc', 'en/rest-api/ml/delete-snapshot.asciidoc', 'en/rest-api/ml/forecast.asciidoc', 'en/rest-api/ml/get-bucket.asciidoc', diff --git a/x-pack/docs/en/rest-api/licensing.asciidoc b/x-pack/docs/en/rest-api/licensing.asciidoc deleted file mode 100644 index b30590630f7f9..0000000000000 --- a/x-pack/docs/en/rest-api/licensing.asciidoc +++ /dev/null @@ -1,22 +0,0 @@ -[role="xpack"] -[[licensing-apis]] -== Licensing APIs - -You can use the following APIs to manage your licenses: - -* <> -* <> -* <> -* <> -* <> -* <> -* <> - - -include::license/delete-license.asciidoc[] -include::license/get-license.asciidoc[] -include::license/get-trial-status.asciidoc[] -include::license/start-trial.asciidoc[] -include::license/get-basic-status.asciidoc[] -include::license/start-basic.asciidoc[] -include::license/update-license.asciidoc[] From 6ebe6e32cf3d42f0a82e93915e7f9e2b1b9974f7 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 20 Jun 2018 16:57:51 +0100 Subject: [PATCH 49/92] [DOCS] Omit shard failures assertion for incompatible responses (#31430) Filter out the assertion for _cat and _xpack/ml/datafeed APIs --- .../doc/RestTestsFromSnippetsTask.groovy | 19 ++++++++++++------- .../doc/RestTestsFromSnippetsTaskTest.groovy | 10 +++++++--- 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy 
b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy index adacc1863c595..3c056a5528b5e 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy @@ -27,7 +27,6 @@ import org.gradle.api.tasks.OutputDirectory import java.nio.file.Files import java.nio.file.Path -import java.util.regex.Matcher /** * Generates REST tests for each snippet marked // TEST. @@ -100,6 +99,14 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { return snippet.language == 'js' || snippet.curl } + /** + * Certain requests should not have the shard failure check because the + * format of the response is incompatible i.e. it is not a JSON object. + */ + static shouldAddShardFailureCheck(String path) { + return path.startsWith('_cat') == false && path.startsWith('_xpack/ml/datafeeds/') == false + } + /** * Converts Kibana's block quoted strings into standard JSON. These * {@code """} delimited strings can be embedded in CONSOLE and can @@ -309,13 +316,11 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { * no shard succeeds. But we need to fail the tests on all of these * because they mean invalid syntax or broken queries or something * else that we don't want to teach people to do. The REST test - * framework doesn't allow us to has assertions in the setup - * section so we have to skip it there. We also have to skip _cat - * actions because they don't return json so we can't is_false - * them. That is ok because they don't have this - * partial-success-is-success thing. + * framework doesn't allow us to have assertions in the setup + * section so we have to skip it there. We also omit the assertion + * from APIs that don't return a JSON object */ - if (false == inSetup && false == path.startsWith('_cat')) { + if (false == inSetup && shouldAddShardFailureCheck(path)) { current.println(" - is_false: _shards.failures") } } diff --git a/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy b/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy index d0a7a2825e6f2..b986319492001 100644 --- a/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy +++ b/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy @@ -19,9 +19,7 @@ package org.elasticsearch.gradle.doc -import org.elasticsearch.gradle.doc.SnippetsTask.Snippet -import org.gradle.api.InvalidUserDataException - +import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.shouldAddShardFailureCheck import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote class RestTestFromSnippetsTaskTest extends GroovyTestCase { @@ -47,4 +45,10 @@ class RestTestFromSnippetsTaskTest extends GroovyTestCase { assertEquals("\"foo\": \"bort\\n baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"")); } + + void testIsDocWriteRequest() { + assertTrue(shouldAddShardFailureCheck("doc-index/_search")); + assertFalse(shouldAddShardFailureCheck("_cat")) + assertFalse(shouldAddShardFailureCheck("_xpack/ml/datafeeds/datafeed-id/_preview")); + } } From e2029232256298d89022f5314ba8d174cf43a168 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Wed, 20 Jun 2018 21:42:15 +0300 Subject: [PATCH 50/92] [DOCS] Fix JDBC Maven client group/artifact ID --- x-pack/docs/en/sql/endpoints/jdbc.asciidoc | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc index a980278810e57..6959035bf09e4 100644 --- a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc +++ b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc @@ -14,8 +14,8 @@ The JDBC driver can be obtained either by downloading it from the https://www.el ["source","xml",subs="attributes"] ---- <dependency> - <groupId>org.elasticsearch.plugin.jdbc</groupId> - <artifactId>jdbc</artifactId> + <groupId>org.elasticsearch.plugin</groupId> + <artifactId>x-pack-sql-jdbc</artifactId> <version>{version}</version> </dependency> ---- From 4f850273b363aa6e7e93f778ae30038e34d70abb Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 20 Jun 2018 13:00:18 -0700 Subject: [PATCH 51/92] extend is-write-index serialization support to 6.4 (#31415) --- .../org/elasticsearch/action/admin/indices/alias/Alias.java | 4 ++-- .../action/admin/indices/alias/IndicesAliasesRequest.java | 4 ++-- .../org/elasticsearch/cluster/metadata/AliasMetaData.java | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java index 10ee8877fc9c9..0f8439643b8f0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java @@ -202,7 +202,7 @@ public void readFrom(StreamInput in) throws IOException { filter = in.readOptionalString(); indexRouting = in.readOptionalString(); searchRouting = in.readOptionalString(); - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { writeIndex = in.readOptionalBoolean(); } else { writeIndex = null; @@ -215,7 +215,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(filter); out.writeOptionalString(indexRouting); out.writeOptionalString(searchRouting); - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeOptionalBoolean(writeIndex); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index 00e3f7e32df3b..c7e7288e74f55 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -235,7 +235,7 @@ public AliasActions(StreamInput in) throws IOException { routing = in.readOptionalString(); searchRouting = in.readOptionalString(); indexRouting = in.readOptionalString(); - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { writeIndex = in.readOptionalBoolean(); } } @@ -249,7 +249,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(routing); out.writeOptionalString(searchRouting); out.writeOptionalString(indexRouting); - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeOptionalBoolean(writeIndex); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java index 29455123287a6..e17103ee30e7f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java +++
b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java @@ -186,7 +186,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(false); } - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeOptionalBoolean(writeIndex()); } } @@ -210,7 +210,7 @@ public AliasMetaData(StreamInput in) throws IOException { searchRouting = null; searchRoutingValues = emptySet(); } - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { writeIndex = in.readOptionalBoolean(); } else { writeIndex = null; From 8bfb9aadd9ae277d72f9b683af781e4231130184 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 20 Jun 2018 13:00:36 -0700 Subject: [PATCH 52/92] lower rollover-info version bound to 6.4 (#31414) This feature is supported in 6.4, this commit reflects that --- .../org/elasticsearch/cluster/metadata/IndexMetaData.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 9e4f849787867..d978e214fc96c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -693,7 +693,7 @@ public Diff readDiff(StreamInput in, String key) throws IOException { }); inSyncAllocationIds = DiffableUtils.readImmutableOpenIntMapDiff(in, DiffableUtils.getVIntKeySerializer(), DiffableUtils.StringSetValueSerializer.getInstance()); - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { rolloverInfos = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), RolloverInfo::new, RolloverInfo::readDiffFrom); } else { @@ -714,7 +714,7 @@ public void writeTo(StreamOutput out) throws IOException { aliases.writeTo(out); customs.writeTo(out); inSyncAllocationIds.writeTo(out); - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { rolloverInfos.writeTo(out); } } @@ -765,7 +765,7 @@ public static IndexMetaData readFrom(StreamInput in) throws IOException { Set allocationIds = DiffableUtils.StringSetValueSerializer.getInstance().read(in, key); builder.putInSyncAllocationIds(key, allocationIds); } - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { int rolloverAliasesSize = in.readVInt(); for (int i = 0; i < rolloverAliasesSize; i++) { builder.putRolloverInfo(new RolloverInfo(in)); @@ -800,7 +800,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(cursor.key); DiffableUtils.StringSetValueSerializer.getInstance().write(cursor.value, out); } - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeVInt(rolloverInfos.size()); for (ObjectCursor cursor : rolloverInfos.values()) { cursor.value.writeTo(out); From 9ab1325953b30fb3d881e9c27b2afdd3274322b6 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 20 Jun 2018 16:34:56 -0600 Subject: [PATCH 53/92] Introduce http and tcp server channels (#31446) Historically in TcpTransport server channels were represented by the same channel interface as socket channels. This was necessary as TcpTransport was parameterized by the channel type. 
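The awkwardness is easiest to see as a sketch. The interfaces below are illustrative only (hypothetical names and trimmed method sets, not the types added by this commit); they show the shape of the split, where a listening socket stops stubbing out operations such as sending a message or reporting a remote peer.

["source","java"]
----
import java.io.Closeable;
import java.net.InetSocketAddress;

// Illustrative sketch only: hypothetical interface names with trimmed method
// sets, not the actual channel interfaces in this commit.
public class ServerChannelSplitSketch {

    // Before: server sockets went through the same interface as accepted and
    // outbound sockets, so they had to stub out operations that make no sense
    // for a listening socket.
    interface UnifiedChannelSketch extends Closeable {
        InetSocketAddress getLocalAddress();
        InetSocketAddress getRemoteAddress(); // returned null for server channels
        void sendMessage(byte[] message);     // threw UnsupportedOperationException
    }

    // After: a listening socket exposes only what it can actually do.
    interface ServerChannelSketch extends Closeable {
        String getProfile();
        InetSocketAddress getLocalAddress();
        boolean isOpen();
    }

    // Read/write oriented operations stay on the socket-channel side.
    interface SocketChannelSketch extends Closeable {
        InetSocketAddress getLocalAddress();
        InetSocketAddress getRemoteAddress();
        void sendMessage(byte[] message);
        boolean isOpen();
    }
}
----

The NioTcpServerChannel hunk further down, which deletes its UnsupportedOperationException stubs for sendMessage and setSoLinger, shows the concrete effect of this split.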
This commit introduces TcpServerChannel and HttpServerChannel classes. Additionally, it adds the implementations for the various transports. This allows server channels to have unique functionality and not implement the methods they do not support (such as send and getRemoteAddress). Additionally, with the introduction of HttpServerChannel this commit extracts some of the storing and closing channel work to the abstract http server transport. --- .../http/netty4/Netty4HttpRequestHandler.java | 11 +- .../http/netty4/Netty4HttpServerChannel.java | 76 ++++++++++ .../netty4/Netty4HttpServerTransport.java | 116 +++++---------- .../netty4/Netty4MessageChannelHandler.java | 18 ++- ...yTcpChannel.java => Netty4TcpChannel.java} | 6 +- .../netty4/Netty4TcpServerChannel.java | 84 +++++++++++ .../transport/netty4/Netty4Transport.java | 45 +++--- .../Netty4SizeHeaderFrameDecoderTests.java | 2 +- .../http/nio/NioHttpServerChannel.java | 44 ++++++ .../http/nio/NioHttpServerTransport.java | 134 +++--------------- .../transport/nio/NioTcpServerChannel.java | 23 +-- .../transport/nio/NioTransport.java | 4 +- .../http/AbstractHttpServerTransport.java | 92 ++++++++++-- .../elasticsearch/http/HttpServerChannel.java | 34 +++++ .../transport/TcpServerChannel.java | 46 ++++++ .../elasticsearch/transport/TcpTransport.java | 27 ++-- .../AbstractHttpServerTransportTests.java | 13 +- .../transport/TcpTransportTests.java | 6 +- .../transport/MockTcpTransport.java | 2 +- .../transport/nio/MockNioTransport.java | 18 +-- .../netty4/SecurityNetty4Transport.java | 2 +- .../transport/ServerTransportFilter.java | 6 +- .../transport/nio/SecurityNioTransport.java | 6 +- 23 files changed, 501 insertions(+), 314 deletions(-) create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerChannel.java rename modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/{NettyTcpChannel.java => Netty4TcpChannel.java} (96%) create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpServerChannel.java create mode 100644 plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java create mode 100644 server/src/main/java/org/elasticsearch/http/HttpServerChannel.java create mode 100644 server/src/main/java/org/elasticsearch/transport/TcpServerChannel.java diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java index 124bd607ab7ae..ab078ad10d337 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java @@ -29,8 +29,6 @@ import org.elasticsearch.http.HttpPipelinedRequest; import org.elasticsearch.transport.netty4.Netty4Utils; -import static org.elasticsearch.http.netty4.Netty4HttpServerTransport.HTTP_CHANNEL_KEY; - @ChannelHandler.Sharable class Netty4HttpRequestHandler extends SimpleChannelInboundHandler> { @@ -42,7 +40,7 @@ class Netty4HttpRequestHandler extends SimpleChannelInboundHandler msg) throws Exception { - Netty4HttpChannel channel = ctx.channel().attr(HTTP_CHANNEL_KEY).get(); + Netty4HttpChannel channel = ctx.channel().attr(Netty4HttpServerTransport.HTTP_CHANNEL_KEY).get(); FullHttpRequest request = msg.getRequest(); try { @@ -77,12 +75,11 @@ protected void channelRead0(ChannelHandlerContext 
ctx, HttpPipelinedRequest closeContext = new CompletableContext<>(); + + Netty4HttpServerChannel(Channel channel) { + this.channel = channel; + this.channel.closeFuture().addListener(f -> { + if (f.isSuccess()) { + closeContext.complete(null); + } else { + Throwable cause = f.cause(); + if (cause instanceof Error) { + Netty4Utils.maybeDie(cause); + closeContext.completeExceptionally(new Exception(cause)); + } else { + closeContext.completeExceptionally((Exception) cause); + } + } + }); + } + + @Override + public InetSocketAddress getLocalAddress() { + return (InetSocketAddress) channel.localAddress(); + } + + @Override + public void addCloseListener(ActionListener listener) { + closeContext.addListener(ActionListener.toBiConsumer(listener)); + } + + @Override + public boolean isOpen() { + return channel.isOpen(); + } + + @Override + public void close() { + channel.close(); + } + + @Override + public String toString() { + return "Netty4HttpChannel{localAddress=" + getLocalAddress() + "}"; + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 8a49ce38b89bc..34f00c0684040 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -23,6 +23,7 @@ import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerAdapter; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; @@ -42,22 +43,19 @@ import io.netty.util.AttributeKey; import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.CloseableChannel; -import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.AbstractHttpServerTransport; -import org.elasticsearch.http.BindHttpException; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpHandlingSettings; -import org.elasticsearch.http.HttpStats; +import org.elasticsearch.http.HttpServerChannel; import org.elasticsearch.http.netty4.cors.Netty4CorsConfig; import org.elasticsearch.http.netty4.cors.Netty4CorsConfigBuilder; import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; @@ -65,14 +63,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.netty4.Netty4Utils; -import java.io.IOException; -import java.net.InetAddress; import java.net.InetSocketAddress; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Pattern; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; @@ -154,12 
+147,6 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { private final int pipeliningMaxEvents; - private final boolean tcpNoDelay; - private final boolean tcpKeepAlive; - private final boolean reuseAddress; - - private final ByteSizeValue tcpSendBufferSize; - private final ByteSizeValue tcpReceiveBufferSize; private final RecvByteBufAllocator recvByteBufAllocator; private final int readTimeoutMillis; @@ -167,8 +154,6 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { protected volatile ServerBootstrap serverBootstrap; - protected final List serverChannels = new ArrayList<>(); - private final Netty4CorsConfig corsConfig; public Netty4HttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, @@ -184,11 +169,6 @@ public Netty4HttpServerTransport(Settings settings, NetworkService networkServic this.maxCompositeBufferComponents = SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS.get(settings); this.workerCount = SETTING_HTTP_WORKER_COUNT.get(settings); - this.tcpNoDelay = SETTING_HTTP_TCP_NO_DELAY.get(settings); - this.tcpKeepAlive = SETTING_HTTP_TCP_KEEP_ALIVE.get(settings); - this.reuseAddress = SETTING_HTTP_TCP_REUSE_ADDRESS.get(settings); - this.tcpSendBufferSize = SETTING_HTTP_TCP_SEND_BUFFER_SIZE.get(settings); - this.tcpReceiveBufferSize = SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE.get(settings); this.readTimeoutMillis = Math.toIntExact(SETTING_HTTP_READ_TIMEOUT.get(settings).getMillis()); ByteSizeValue receivePredictor = SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE.get(settings); @@ -217,6 +197,7 @@ protected void doStart() { serverBootstrap.channel(NioServerSocketChannel.class); serverBootstrap.childHandler(configureServerChannelHandler()); + serverBootstrap.handler(new ServerChannelExceptionHandler(this)); serverBootstrap.childOption(ChannelOption.TCP_NODELAY, SETTING_HTTP_TCP_NO_DELAY.get(settings)); serverBootstrap.childOption(ChannelOption.SO_KEEPALIVE, SETTING_HTTP_TCP_KEEP_ALIVE.get(settings)); @@ -238,10 +219,7 @@ protected void doStart() { serverBootstrap.option(ChannelOption.SO_REUSEADDR, reuseAddress); serverBootstrap.childOption(ChannelOption.SO_REUSEADDR, reuseAddress); - this.boundAddress = createBoundHttpAddress(); - if (logger.isInfoEnabled()) { - logger.info("{}", boundAddress); - } + bindServer(); success = true; } finally { if (success == false) { @@ -284,78 +262,29 @@ static Netty4CorsConfig buildCorsConfig(Settings settings) { } @Override - protected TransportAddress bindAddress(final InetAddress hostAddress) { - final AtomicReference lastException = new AtomicReference<>(); - final AtomicReference boundSocket = new AtomicReference<>(); - boolean success = port.iterate(portNumber -> { - try { - synchronized (serverChannels) { - ChannelFuture future = serverBootstrap.bind(new InetSocketAddress(hostAddress, portNumber)).sync(); - serverChannels.add(future.channel()); - boundSocket.set((InetSocketAddress) future.channel().localAddress()); - } - } catch (Exception e) { - lastException.set(e); - return false; - } - return true; - }); - if (!success) { - throw new BindHttpException("Failed to bind to [" + port.getPortRangeString() + "]", lastException.get()); - } - - if (logger.isDebugEnabled()) { - logger.debug("Bound http to address {{}}", NetworkAddress.format(boundSocket.get())); - } - return new TransportAddress(boundSocket.get()); + protected HttpServerChannel bind(InetSocketAddress socketAddress) throws Exception { + ChannelFuture future = 
serverBootstrap.bind(socketAddress).sync(); + Channel channel = future.channel(); + Netty4HttpServerChannel httpServerChannel = new Netty4HttpServerChannel(channel); + channel.attr(HTTP_SERVER_CHANNEL_KEY).set(httpServerChannel); + return httpServerChannel; } @Override - protected void doStop() { - synchronized (serverChannels) { - if (!serverChannels.isEmpty()) { - try { - Netty4Utils.closeChannels(serverChannels); - } catch (IOException e) { - logger.trace("exception while closing channels", e); - } finally { - serverChannels.clear(); - } - } - } - - // TODO: Move all of channel closing to abstract class once server channels are handled - try { - CloseableChannel.closeChannels(new ArrayList<>(httpChannels), true); - } catch (Exception e) { - logger.warn("unexpected exception while closing http channels", e); - } - httpChannels.clear(); - - - + protected void stopInternal() { if (serverBootstrap != null) { serverBootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS).awaitUninterruptibly(); serverBootstrap = null; } } - @Override - protected void doClose() { - } - - @Override - public HttpStats stats() { - return new HttpStats(httpChannels.size(), totalChannelsAccepted.get()); - } - @Override protected void onException(HttpChannel channel, Exception cause) { if (cause instanceof ReadTimeoutException) { if (logger.isTraceEnabled()) { logger.trace("Http read timeout {}", channel); } - CloseableChannel.closeChannel(channel);; + CloseableChannel.closeChannel(channel); } else { super.onException(channel, cause); } @@ -366,6 +295,7 @@ public ChannelHandler configureServerChannelHandler() { } static final AttributeKey HTTP_CHANNEL_KEY = AttributeKey.newInstance("es-http-channel"); + static final AttributeKey HTTP_SERVER_CHANNEL_KEY = AttributeKey.newInstance("es-http-server-channel"); protected static class HttpChannelHandler extends ChannelInitializer { @@ -413,4 +343,24 @@ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws E } } + @ChannelHandler.Sharable + private static class ServerChannelExceptionHandler extends ChannelHandlerAdapter { + + private final Netty4HttpServerTransport transport; + + private ServerChannelExceptionHandler(Netty4HttpServerTransport transport) { + this.transport = transport; + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { + Netty4Utils.maybeDie(cause); + Netty4HttpServerChannel httpServerChannel = ctx.channel().attr(HTTP_SERVER_CHANNEL_KEY).get(); + if (cause instanceof Error) { + transport.onServerException(httpServerChannel, new Exception(cause)); + } else { + transport.onServerException(httpServerChannel, (Exception) cause); + } + } + } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java index 58440ae96e07a..698c86d048c1c 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java @@ -24,6 +24,8 @@ import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.util.Attribute; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.transport.TcpHeader; import 
org.elasticsearch.transport.Transports; @@ -36,11 +38,9 @@ final class Netty4MessageChannelHandler extends ChannelDuplexHandler { private final Netty4Transport transport; - private final String profileName; - Netty4MessageChannelHandler(Netty4Transport transport, String profileName) { + Netty4MessageChannelHandler(Netty4Transport transport) { this.transport = transport; - this.profileName = profileName; } @Override @@ -58,7 +58,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception // netty always copies a buffer, either in NioWorker in its read handler, where it copies to a fresh // buffer, or in the cumulative buffer, which is cleaned each time so it could be bigger than the actual size BytesReference reference = Netty4Utils.toBytesReference(buffer, remainingMessageSize); - Attribute channelAttribute = channel.attr(Netty4Transport.CHANNEL_KEY); + Attribute channelAttribute = channel.attr(Netty4Transport.CHANNEL_KEY); transport.messageReceived(reference, channelAttribute.get()); } finally { // Set the expected position of the buffer, no matter what happened @@ -69,7 +69,13 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { Netty4Utils.maybeDie(cause); - transport.exceptionCaught(ctx, cause); + final Throwable unwrapped = ExceptionsHelper.unwrap(cause, ElasticsearchException.class); + final Throwable newCause = unwrapped != null ? unwrapped : cause; + Netty4TcpChannel tcpChannel = ctx.channel().attr(Netty4Transport.CHANNEL_KEY).get(); + if (newCause instanceof Error) { + transport.onException(tcpChannel, new Exception(newCause)); + } else { + transport.onException(tcpChannel, (Exception) newCause); + } } - } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyTcpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java similarity index 96% rename from modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyTcpChannel.java rename to modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java index 89fabdcd763d1..78a1425500072 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyTcpChannel.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java @@ -30,13 +30,13 @@ import java.net.InetSocketAddress; -public class NettyTcpChannel implements TcpChannel { +public class Netty4TcpChannel implements TcpChannel { private final Channel channel; private final String profile; private final CompletableContext closeContext = new CompletableContext<>(); - NettyTcpChannel(Channel channel, String profile) { + Netty4TcpChannel(Channel channel, String profile) { this.channel = channel; this.profile = profile; this.channel.closeFuture().addListener(f -> { @@ -118,7 +118,7 @@ public Channel getLowLevelChannel() { @Override public String toString() { - return "NettyTcpChannel{" + + return "Netty4TcpChannel{" + "localAddress=" + getLocalAddress() + ", remoteAddress=" + channel.remoteAddress() + '}'; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpServerChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpServerChannel.java new file mode 100644 index 0000000000000..873a6c33fba11 --- /dev/null +++ 
b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpServerChannel.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport.netty4; + +import io.netty.channel.Channel; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.concurrent.CompletableContext; +import org.elasticsearch.transport.TcpServerChannel; + +import java.net.InetSocketAddress; + +public class Netty4TcpServerChannel implements TcpServerChannel { + + private final Channel channel; + private final String profile; + private final CompletableContext closeContext = new CompletableContext<>(); + + Netty4TcpServerChannel(Channel channel, String profile) { + this.channel = channel; + this.profile = profile; + this.channel.closeFuture().addListener(f -> { + if (f.isSuccess()) { + closeContext.complete(null); + } else { + Throwable cause = f.cause(); + if (cause instanceof Error) { + Netty4Utils.maybeDie(cause); + closeContext.completeExceptionally(new Exception(cause)); + } else { + closeContext.completeExceptionally((Exception) cause); + } + } + }); + } + + @Override + public String getProfile() { + return profile; + } + + @Override + public InetSocketAddress getLocalAddress() { + return (InetSocketAddress) channel.localAddress(); + } + + @Override + public void close() { + channel.close(); + } + + @Override + public void addCloseListener(ActionListener listener) { + closeContext.addListener(ActionListener.toBiConsumer(listener)); + } + + @Override + public boolean isOpen() { + return channel.isOpen(); + } + + @Override + public String toString() { + return "Netty4TcpChannel{" + + "localAddress=" + getLocalAddress() + + '}'; + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index 466c4b68bfa4e..c8c6fceb54304 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -25,6 +25,7 @@ import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerAdapter; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; @@ -37,8 +38,6 @@ import io.netty.util.concurrent.Future; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; 
import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; @@ -196,6 +195,7 @@ private void createServerBootstrap(ProfileSettings profileSettings) { serverBootstrap.channel(NioServerSocketChannel.class); serverBootstrap.childHandler(getServerChannelInitializer(name)); + serverBootstrap.handler(new ServerChannelExceptionHandler()); serverBootstrap.childOption(ChannelOption.TCP_NODELAY, profileSettings.tcpNoDelay); serverBootstrap.childOption(ChannelOption.SO_KEEPALIVE, profileSettings.tcpKeepAlive); @@ -226,17 +226,11 @@ protected ChannelHandler getClientChannelInitializer() { return new ClientChannelInitializer(); } - static final AttributeKey CHANNEL_KEY = AttributeKey.newInstance("es-channel"); - - protected final void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { - final Throwable unwrapped = ExceptionsHelper.unwrap(cause, ElasticsearchException.class); - final Throwable t = unwrapped != null ? unwrapped : cause; - Channel channel = ctx.channel(); - onException(channel.attr(CHANNEL_KEY).get(), t instanceof Exception ? (Exception) t : new ElasticsearchException(t)); - } + static final AttributeKey CHANNEL_KEY = AttributeKey.newInstance("es-channel"); + static final AttributeKey SERVER_CHANNEL_KEY = AttributeKey.newInstance("es-server-channel"); @Override - protected NettyTcpChannel initiateChannel(InetSocketAddress address, ActionListener listener) throws IOException { + protected Netty4TcpChannel initiateChannel(InetSocketAddress address, ActionListener listener) throws IOException { ChannelFuture channelFuture = bootstrap.connect(address); Channel channel = channelFuture.channel(); if (channel == null) { @@ -245,7 +239,7 @@ protected NettyTcpChannel initiateChannel(InetSocketAddress address, ActionListe } addClosedExceptionLogger(channel); - NettyTcpChannel nettyChannel = new NettyTcpChannel(channel, "default"); + Netty4TcpChannel nettyChannel = new Netty4TcpChannel(channel, "default"); channel.attr(CHANNEL_KEY).set(nettyChannel); channelFuture.addListener(f -> { @@ -266,10 +260,10 @@ protected NettyTcpChannel initiateChannel(InetSocketAddress address, ActionListe } @Override - protected NettyTcpChannel bind(String name, InetSocketAddress address) { + protected Netty4TcpServerChannel bind(String name, InetSocketAddress address) { Channel channel = serverBootstraps.get(name).bind(address).syncUninterruptibly().channel(); - NettyTcpChannel esChannel = new NettyTcpChannel(channel, name); - channel.attr(CHANNEL_KEY).set(esChannel); + Netty4TcpServerChannel esChannel = new Netty4TcpServerChannel(channel, name); + channel.attr(SERVER_CHANNEL_KEY).set(esChannel); return esChannel; } @@ -310,7 +304,7 @@ protected void initChannel(Channel ch) throws Exception { ch.pipeline().addLast("logging", new ESLoggingHandler()); ch.pipeline().addLast("size", new Netty4SizeHeaderFrameDecoder()); // using a dot as a prefix means this cannot come from any settings parsed - ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this, ".client")); + ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this)); } @Override @@ -331,11 +325,11 @@ protected ServerChannelInitializer(String name) { @Override protected void initChannel(Channel ch) throws Exception { addClosedExceptionLogger(ch); - NettyTcpChannel nettyTcpChannel = new NettyTcpChannel(ch, name); + Netty4TcpChannel nettyTcpChannel = new Netty4TcpChannel(ch, name); ch.attr(CHANNEL_KEY).set(nettyTcpChannel); 
ch.pipeline().addLast("logging", new ESLoggingHandler()); ch.pipeline().addLast("size", new Netty4SizeHeaderFrameDecoder()); - ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this, name)); + ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this)); serverAcceptedChannel(nettyTcpChannel); } @@ -353,4 +347,19 @@ private void addClosedExceptionLogger(Channel channel) { } }); } + + @ChannelHandler.Sharable + private class ServerChannelExceptionHandler extends ChannelHandlerAdapter { + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { + Netty4Utils.maybeDie(cause); + Netty4TcpServerChannel serverChannel = ctx.channel().attr(SERVER_CHANNEL_KEY).get(); + if (cause instanceof Error) { + onServerException(serverChannel, new Exception(cause)); + } else { + onServerException(serverChannel, (Exception) cause); + } + } + } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java index 7343da6c3b11a..4c783cf078769 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java @@ -70,7 +70,7 @@ public void startThreadPool() { nettyTransport.start(); TransportAddress[] boundAddresses = nettyTransport.boundAddress().boundAddresses(); - TransportAddress transportAddress = (TransportAddress) randomFrom(boundAddresses); + TransportAddress transportAddress = randomFrom(boundAddresses); port = transportAddress.address().getPort(); host = transportAddress.address().getAddress(); } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java new file mode 100644 index 0000000000000..2674d38dc490e --- /dev/null +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.nio; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.http.HttpServerChannel; +import org.elasticsearch.nio.NioServerSocketChannel; + +import java.io.IOException; +import java.nio.channels.ServerSocketChannel; + +public class NioHttpServerChannel extends NioServerSocketChannel implements HttpServerChannel { + + NioHttpServerChannel(ServerSocketChannel serverSocketChannel) throws IOException { + super(serverSocketChannel); + } + + @Override + public void addCloseListener(ActionListener listener) { + addCloseListener(ActionListener.toBiConsumer(listener)); + } + + @Override + public String toString() { + return "NioHttpServerChannel{localAddress=" + getLocalAddress() + "}"; + } +} diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java index aa0859e6146f2..b80778e964293 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java @@ -21,40 +21,29 @@ import io.netty.handler.codec.http.HttpMethod; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.network.CloseableChannel; -import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.AbstractHttpServerTransport; -import org.elasticsearch.http.BindHttpException; import org.elasticsearch.http.HttpChannel; +import org.elasticsearch.http.HttpServerChannel; import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.http.HttpStats; import org.elasticsearch.http.nio.cors.NioCorsConfig; import org.elasticsearch.http.nio.cors.NioCorsConfigBuilder; import org.elasticsearch.nio.BytesChannelContext; import org.elasticsearch.nio.ChannelFactory; import org.elasticsearch.nio.EventHandler; import org.elasticsearch.nio.InboundChannelBuffer; -import org.elasticsearch.nio.NioChannel; import org.elasticsearch.nio.NioGroup; import org.elasticsearch.nio.NioSelector; -import org.elasticsearch.nio.NioServerSocketChannel; import org.elasticsearch.nio.NioSocketChannel; import org.elasticsearch.nio.ServerChannelContext; import org.elasticsearch.nio.SocketChannelContext; @@ -62,18 +51,11 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; -import java.util.ArrayList; import 
java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.regex.Pattern; @@ -113,7 +95,6 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { private final int tcpSendBufferSize; private final int tcpReceiveBufferSize; - private final Set serverChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); private NioGroup nioGroup; private HttpChannelFactory channelFactory; private final NioCorsConfig corsConfig; @@ -156,12 +137,7 @@ protected void doStart() { daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX), workerCount, (s) -> new EventHandler(this::onNonChannelException, s)); channelFactory = new HttpChannelFactory(); - this.boundAddress = createBoundHttpAddress(); - - if (logger.isInfoEnabled()) { - logger.info("{}", boundAddress); - } - + bindServer(); success = true; } catch (IOException e) { throw new ElasticsearchException(e); @@ -173,26 +149,7 @@ protected void doStart() { } @Override - protected void doStop() { - synchronized (serverChannels) { - if (serverChannels.isEmpty() == false) { - try { - closeChannels(new ArrayList<>(serverChannels)); - } catch (Exception e) { - logger.error("unexpected exception while closing http server channels", e); - } - serverChannels.clear(); - } - } - - // TODO: Move all of channel closing to abstract class once server channels are handled - try { - CloseableChannel.closeChannels(new ArrayList<>(httpChannels), true); - } catch (Exception e) { - logger.warn("unexpected exception while closing http channels", e); - } - httpChannels.clear(); - + protected void stopInternal() { try { nioGroup.close(); } catch (Exception e) { @@ -201,40 +158,8 @@ protected void doStop() { } @Override - protected void doClose() throws IOException { - } - - @Override - protected TransportAddress bindAddress(InetAddress hostAddress) { - final AtomicReference lastException = new AtomicReference<>(); - final AtomicReference boundSocket = new AtomicReference<>(); - boolean success = port.iterate(portNumber -> { - try { - synchronized (serverChannels) { - InetSocketAddress address = new InetSocketAddress(hostAddress, portNumber); - NioServerSocketChannel channel = nioGroup.bindServerChannel(address, channelFactory); - serverChannels.add(channel); - boundSocket.set(channel.getLocalAddress()); - } - } catch (Exception e) { - lastException.set(e); - return false; - } - return true; - }); - if (success == false) { - throw new BindHttpException("Failed to bind to [" + port.getPortRangeString() + "]", lastException.get()); - } - - if (logger.isDebugEnabled()) { - logger.debug("Bound http to address {{}}", NetworkAddress.format(boundSocket.get())); - } - return new TransportAddress(boundSocket.get()); - } - - @Override - public HttpStats stats() { - return new HttpStats(serverChannels.size(), totalChannelsAccepted.get()); + protected HttpServerChannel bind(InetSocketAddress socketAddress) throws IOException { + return nioGroup.bindServerChannel(socketAddress, channelFactory); } static NioCorsConfig buildCorsConfig(Settings settings) { @@ -269,33 +194,11 @@ static NioCorsConfig buildCorsConfig(Settings settings) { .build(); } - private void closeChannels(List channels) { - List> futures = new ArrayList<>(channels.size()); - - for (NioChannel channel : channels) { - PlainActionFuture future = PlainActionFuture.newFuture(); - 
channel.addCloseListener(ActionListener.toBiConsumer(future)); - futures.add(future); - channel.close(); - } - - List closeExceptions = new ArrayList<>(); - for (ActionFuture f : futures) { - try { - f.actionGet(); - } catch (RuntimeException e) { - closeExceptions.add(e); - } - } - - ExceptionsHelper.rethrowAndSuppress(closeExceptions); - } - private void acceptChannel(NioSocketChannel socketChannel) { super.serverAcceptedChannel((HttpChannel) socketChannel); } - private class HttpChannelFactory extends ChannelFactory { + private class HttpChannelFactory extends ChannelFactory { private HttpChannelFactory() { super(new RawChannelFactory(tcpNoDelay, tcpKeepAlive, reuseAddress, tcpSendBufferSize, tcpReceiveBufferSize)); @@ -303,29 +206,28 @@ private HttpChannelFactory() { @Override public NioHttpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { - NioHttpChannel nioChannel = new NioHttpChannel(channel); + NioHttpChannel httpChannel = new NioHttpChannel(channel); java.util.function.Supplier pageSupplier = () -> { Recycler.V bytes = pageCacheRecycler.bytePage(false); return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close); }; - HttpReadWriteHandler httpReadWritePipeline = new HttpReadWriteHandler(nioChannel,NioHttpServerTransport.this, + HttpReadWriteHandler httpReadWritePipeline = new HttpReadWriteHandler(httpChannel,NioHttpServerTransport.this, handlingSettings, corsConfig); - Consumer exceptionHandler = (e) -> onException(nioChannel, e); - SocketChannelContext context = new BytesChannelContext(nioChannel, selector, exceptionHandler, httpReadWritePipeline, + Consumer exceptionHandler = (e) -> onException(httpChannel, e); + SocketChannelContext context = new BytesChannelContext(httpChannel, selector, exceptionHandler, httpReadWritePipeline, new InboundChannelBuffer(pageSupplier)); - nioChannel.setContext(context); - return nioChannel; + httpChannel.setContext(context); + return httpChannel; } @Override - public NioServerSocketChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { - NioServerSocketChannel nioChannel = new NioServerSocketChannel(channel); - Consumer exceptionHandler = (e) -> logger.error(() -> - new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); + public NioHttpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { + NioHttpServerChannel httpServerChannel = new NioHttpServerChannel(channel); + Consumer exceptionHandler = (e) -> onServerException(httpServerChannel, e); Consumer acceptor = NioHttpServerTransport.this::acceptChannel; - ServerChannelContext context = new ServerChannelContext(nioChannel, this, selector, acceptor, exceptionHandler); - nioChannel.setContext(context); - return nioChannel; + ServerChannelContext context = new ServerChannelContext(httpServerChannel, this, selector, acceptor, exceptionHandler); + httpServerChannel.setContext(context); + return httpServerChannel; } } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java index 10bf4ed752321..3c6d4b12df943 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java @@ -20,19 +20,17 @@ package 
org.elasticsearch.transport.nio; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.nio.NioServerSocketChannel; -import org.elasticsearch.transport.TcpChannel; +import org.elasticsearch.transport.TcpServerChannel; import java.io.IOException; -import java.net.InetSocketAddress; import java.nio.channels.ServerSocketChannel; /** - * This is an implementation of {@link NioServerSocketChannel} that adheres to the {@link TcpChannel} + * This is an implementation of {@link NioServerSocketChannel} that adheres to the {@link TcpServerChannel} * interface. As it is a server socket, setting SO_LINGER and sending messages is not supported. */ -public class NioTcpServerChannel extends NioServerSocketChannel implements TcpChannel { +public class NioTcpServerChannel extends NioServerSocketChannel implements TcpServerChannel { private final String profile; @@ -41,21 +39,6 @@ public NioTcpServerChannel(String profile, ServerSocketChannel socketChannel) th this.profile = profile; } - @Override - public void sendMessage(BytesReference reference, ActionListener listener) { - throw new UnsupportedOperationException("Cannot send a message to a server channel."); - } - - @Override - public void setSoLinger(int value) throws IOException { - throw new UnsupportedOperationException("Cannot set SO_LINGER on a server channel."); - } - - @Override - public InetSocketAddress getRemoteAddress() { - return null; - } - @Override public void close() { getContext().closeChannel(); diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java index cf7d37493cb38..47229a0df2f6e 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java @@ -19,7 +19,6 @@ package org.elasticsearch.transport.nio; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -176,8 +175,7 @@ public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) @Override public NioTcpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { NioTcpServerChannel nioChannel = new NioTcpServerChannel(profileName, channel); - Consumer exceptionHandler = (e) -> logger.error(() -> - new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); + Consumer exceptionHandler = (e) -> onServerException(nioChannel, e); Consumer acceptor = NioTransport.this::acceptChannel; ServerChannelContext context = new ServerChannelContext(nioChannel, this, selector, acceptor, exceptionHandler); nioChannel.setContext(context); diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 9d9008f7fb879..622020d6451db 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.network.CloseableChannel; 
+import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; @@ -53,6 +54,7 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_BIND_HOST; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH; @@ -74,9 +76,10 @@ public abstract class AbstractHttpServerTransport extends AbstractLifecycleCompo private final String[] bindHosts; private final String[] publishHosts; - protected final AtomicLong totalChannelsAccepted = new AtomicLong(); - protected final Set httpChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); - protected volatile BoundTransportAddress boundAddress; + private volatile BoundTransportAddress boundAddress; + private final AtomicLong totalChannelsAccepted = new AtomicLong(); + private final Set httpChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); + private final Set httpServerChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); protected AbstractHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, NamedXContentRegistry xContentRegistry, Dispatcher dispatcher) { @@ -116,7 +119,12 @@ public HttpInfo info() { return new HttpInfo(boundTransportAddress, maxContentLength.getBytes()); } - protected BoundTransportAddress createBoundHttpAddress() { + @Override + public HttpStats stats() { + return new HttpStats(httpChannels.size(), totalChannelsAccepted.get()); + } + + protected void bindServer() { // Bind and start to accept incoming connections. 
InetAddress hostAddresses[]; try { @@ -138,11 +146,71 @@ protected BoundTransportAddress createBoundHttpAddress() { } final int publishPort = resolvePublishPort(settings, boundAddresses, publishInetAddress); - final InetSocketAddress publishAddress = new InetSocketAddress(publishInetAddress, publishPort); - return new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[0]), new TransportAddress(publishAddress)); + TransportAddress publishAddress = new TransportAddress(new InetSocketAddress(publishInetAddress, publishPort)); + this.boundAddress = new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[0]), publishAddress); + logger.info("{}", boundAddress); + } + + private TransportAddress bindAddress(final InetAddress hostAddress) { + final AtomicReference lastException = new AtomicReference<>(); + final AtomicReference boundSocket = new AtomicReference<>(); + boolean success = port.iterate(portNumber -> { + try { + synchronized (httpServerChannels) { + HttpServerChannel httpServerChannel = bind(new InetSocketAddress(hostAddress, portNumber)); + httpServerChannels.add(httpServerChannel); + boundSocket.set(httpServerChannel.getLocalAddress()); + } + } catch (Exception e) { + lastException.set(e); + return false; + } + return true; + }); + if (!success) { + throw new BindHttpException("Failed to bind to [" + port.getPortRangeString() + "]", lastException.get()); + } + + if (logger.isDebugEnabled()) { + logger.debug("Bound http to address {{}}", NetworkAddress.format(boundSocket.get())); + } + return new TransportAddress(boundSocket.get()); + } + + protected abstract HttpServerChannel bind(InetSocketAddress hostAddress) throws Exception; + + @Override + protected void doStop() { + synchronized (httpServerChannels) { + if (httpServerChannels.isEmpty() == false) { + try { + CloseableChannel.closeChannels(new ArrayList<>(httpServerChannels), true); + } catch (Exception e) { + logger.warn("exception while closing channels", e); + } finally { + httpServerChannels.clear(); + } + } + } + + try { + CloseableChannel.closeChannels(new ArrayList<>(httpChannels), true); + } catch (Exception e) { + logger.warn("unexpected exception while closing http channels", e); + } + httpChannels.clear(); + + stopInternal(); } - protected abstract TransportAddress bindAddress(InetAddress hostAddress); + @Override + protected void doClose() { + } + + /** + * Called to tear down internal resources + */ + protected abstract void stopInternal(); // package private for tests static int resolvePublishPort(Settings settings, List boundAddresses, InetAddress publishInetAddress) { @@ -197,19 +265,23 @@ protected void onException(HttpChannel channel, Exception e) { CloseableChannel.closeChannel(channel); } else { logger.warn(() -> new ParameterizedMessage( - "caught exception while handling client http traffic, closing connection {}", channel), e); + "caught exception while handling client http traffic, closing connection {}", channel), e); CloseableChannel.closeChannel(channel); } } + protected void onServerException(HttpServerChannel channel, Exception e) { + logger.error(new ParameterizedMessage("exception from http server channel caught on transport layer [channel={}]", channel), e); + } + /** * Exception handler for exceptions that are not associated with a specific channel. 
* * @param exception the exception */ protected void onNonChannelException(Exception exception) { - logger.warn(new ParameterizedMessage("exception caught on transport layer [thread={}]", Thread.currentThread().getName()), - exception); + String threadName = Thread.currentThread().getName(); + logger.warn(new ParameterizedMessage("exception caught on transport layer [thread={}]", threadName), exception); } protected void serverAcceptedChannel(HttpChannel httpChannel) { diff --git a/server/src/main/java/org/elasticsearch/http/HttpServerChannel.java b/server/src/main/java/org/elasticsearch/http/HttpServerChannel.java new file mode 100644 index 0000000000000..e4222ae816806 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/http/HttpServerChannel.java @@ -0,0 +1,34 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http; + +import org.elasticsearch.common.network.CloseableChannel; + +import java.net.InetSocketAddress; + +public interface HttpServerChannel extends CloseableChannel { + + /** + * Returns the local address for this channel. + * + * @return the local address of this channel. + */ + InetSocketAddress getLocalAddress(); +} diff --git a/server/src/main/java/org/elasticsearch/transport/TcpServerChannel.java b/server/src/main/java/org/elasticsearch/transport/TcpServerChannel.java new file mode 100644 index 0000000000000..408ec1af20b96 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/transport/TcpServerChannel.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport; + +import org.elasticsearch.common.network.CloseableChannel; + +import java.net.InetSocketAddress; + + +/** + * This is a tcp channel representing a server channel listening for new connections. It is the server + * channel abstraction used by the {@link TcpTransport} and {@link TransportService}. All tcp transport + * implementations must return server channels that adhere to the required method contracts. 
+ */ +public interface TcpServerChannel extends CloseableChannel { + + /** + * This returns the profile for this channel. + */ + String getProfile(); + + /** + * Returns the local address for this channel. + * + * @return the local address of this channel. + */ + InetSocketAddress getLocalAddress(); + +} diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index bd862c19e9c6d..c8f256c2db89a 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -21,9 +21,6 @@ import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.IntSet; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.network.CloseableChannel; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; @@ -31,6 +28,7 @@ import org.elasticsearch.action.NotifyOnceListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -52,6 +50,7 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkUtils; @@ -68,6 +67,7 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.KeyedLock; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.rest.RestStatus; @@ -210,7 +210,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements private final ConcurrentMap profileBoundAddresses = newConcurrentMap(); // node id to actual channel private final ConcurrentMap connectedNodes = newConcurrentMap(); - private final Map> serverChannels = newConcurrentMap(); + private final Map> serverChannels = newConcurrentMap(); private final Set acceptedChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); private final KeyedLock connectionLock = new KeyedLock<>(); @@ -792,9 +792,9 @@ protected InetSocketAddress bindToPort(final String name, final InetAddress host final AtomicReference boundSocket = new AtomicReference<>(); boolean success = portsRange.iterate(portNumber -> { try { - TcpChannel channel = bind(name, new InetSocketAddress(hostAddress, portNumber)); + TcpServerChannel channel = bind(name, new InetSocketAddress(hostAddress, portNumber)); synchronized (serverChannels) { - List list = serverChannels.get(name); + List list = serverChannels.get(name); if (list == null) { list = new ArrayList<>(); serverChannels.put(name, list); @@ -957,9 +957,9 @@ protected final void doStop() { closeLock.writeLock().lock(); try { // first stop to accept any incoming connections so nobody can connect to this 
transport - for (Map.Entry> entry : serverChannels.entrySet()) { + for (Map.Entry> entry : serverChannels.entrySet()) { String profile = entry.getKey(); - List channels = entry.getValue(); + List channels = entry.getValue(); ActionListener closeFailLogger = ActionListener.wrap(c -> {}, e -> logger.warn(() -> new ParameterizedMessage("Error closing serverChannel for profile [{}]", profile), e)); channels.forEach(c -> c.addCloseListener(closeFailLogger)); @@ -999,7 +999,7 @@ protected final void doStop() { } } - protected void onException(TcpChannel channel, Exception e) { + public void onException(TcpChannel channel, Exception e) { if (!lifecycle.started()) { // just close and ignore - we are already stopped and just need to make sure we release all resources CloseableChannel.closeChannel(channel); @@ -1049,6 +1049,10 @@ protected void innerOnFailure(Exception e) { } } + protected void onServerException(TcpServerChannel channel, Exception e) { + logger.error(new ParameterizedMessage("exception from server channel caught on transport layer [channel={}]", channel), e); + } + /** * Exception handler for exceptions that are not associated with a specific channel. * @@ -1072,7 +1076,7 @@ protected void serverAcceptedChannel(TcpChannel channel) { * @param name the profile name * @param address the address to bind to */ - protected abstract TcpChannel bind(String name, InetSocketAddress address) throws IOException; + protected abstract TcpServerChannel bind(String name, InetSocketAddress address) throws IOException; /** * Initiate a single tcp socket channel. @@ -1087,8 +1091,7 @@ protected void serverAcceptedChannel(TcpChannel channel) { /** * Called to tear down internal resources */ - protected void stopInternal() { - } + protected abstract void stopInternal(); public boolean canCompress(TransportRequest request) { return compress && (!(request instanceof BytesTransportRequest)); diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index a7629e5f48b6c..ece9fd503c1ce 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -35,8 +35,7 @@ import org.junit.After; import org.junit.Before; -import java.io.IOException; -import java.net.InetAddress; +import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collections; @@ -128,8 +127,9 @@ public void dispatchBadRequest(final RestRequest request, try (AbstractHttpServerTransport transport = new AbstractHttpServerTransport(Settings.EMPTY, networkService, bigArrays, threadPool, xContentRegistry(), dispatcher) { + @Override - protected TransportAddress bindAddress(InetAddress hostAddress) { + protected HttpServerChannel bind(InetSocketAddress hostAddress) { return null; } @@ -139,12 +139,7 @@ protected void doStart() { } @Override - protected void doStop() { - - } - - @Override - protected void doClose() throws IOException { + protected void stopInternal() { } diff --git a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java index 2328aa4636361..d16300bf266d6 100644 --- a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java @@ -193,6 +193,10 @@ protected 
FakeChannel initiateChannel(InetSocketAddress address, ActionListener< return new FakeChannel(messageCaptor); } + @Override + protected void stopInternal() { + } + @Override public NodeChannels getConnection(DiscoveryNode node) { int numConnections = MockTcpTransport.LIGHT_PROFILE.getNumConnections(); @@ -237,7 +241,7 @@ public NodeChannels getConnection(DiscoveryNode node) { } } - private static final class FakeChannel implements TcpChannel { + private static final class FakeChannel implements TcpChannel, TcpServerChannel { private final AtomicReference messageCaptor; diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 8831c46c01136..bbff340c86011 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -225,7 +225,7 @@ private void configureSocket(Socket socket) throws SocketException { socket.setReuseAddress(TCP_REUSE_ADDRESS.get(settings)); } - public final class MockChannel implements Closeable, TcpChannel { + public final class MockChannel implements Closeable, TcpChannel, TcpServerChannel { private final AtomicBoolean isOpen = new AtomicBoolean(true); private final InetSocketAddress localAddress; private final ServerSocket serverSocket; diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java index cb9e243660a8e..2ab8719c33422 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java @@ -41,6 +41,7 @@ import org.elasticsearch.nio.ServerChannelContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpChannel; +import org.elasticsearch.transport.TcpServerChannel; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transports; @@ -191,7 +192,7 @@ public int consumeReads(InboundChannelBuffer channelBuffer) throws IOException { } } - private static class MockServerChannel extends NioServerSocketChannel implements TcpChannel { + private static class MockServerChannel extends NioServerSocketChannel implements TcpServerChannel { private final String profile; @@ -215,21 +216,6 @@ public String getProfile() { public void addCloseListener(ActionListener listener) { addCloseListener(ActionListener.toBiConsumer(listener)); } - - @Override - public void setSoLinger(int value) throws IOException { - throw new UnsupportedOperationException("Cannot set SO_LINGER on a server channel."); - } - - @Override - public InetSocketAddress getRemoteAddress() { - return null; - } - - @Override - public void sendMessage(BytesReference reference, ActionListener listener) { - throw new UnsupportedOperationException("Cannot send a message to a server channel."); - } } private static class MockSocketChannel extends NioSocketChannel implements TcpChannel { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java index ce06712722cd1..b761439b15b6a 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java @@ -109,7 +109,7 @@ protected ChannelHandler getClientChannelInitializer() { } @Override - protected void onException(TcpChannel channel, Exception e) { + public void onException(TcpChannel channel, Exception e) { if (!lifecycle.started()) { // just close and ignore - we are already stopped and just need to make sure we release all resources CloseableChannel.closeChannel(channel); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java index 161ac3678aeab..9427812ba1349 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java @@ -24,7 +24,7 @@ import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.transport.netty4.NettyTcpChannel; +import org.elasticsearch.transport.netty4.Netty4TcpChannel; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.user.KibanaUser; @@ -116,8 +116,8 @@ requests from all the nodes are attached with a user (either a serialize } if (extractClientCert && (unwrappedChannel instanceof TcpTransportChannel) && - ((TcpTransportChannel) unwrappedChannel).getChannel() instanceof NettyTcpChannel) { - Channel channel = ((NettyTcpChannel) ((TcpTransportChannel) unwrappedChannel).getChannel()).getLowLevelChannel(); + ((TcpTransportChannel) unwrappedChannel).getChannel() instanceof Netty4TcpChannel) { + Channel channel = ((Netty4TcpChannel) ((TcpTransportChannel) unwrappedChannel).getChannel()).getLowLevelChannel(); SslHandler sslHandler = channel.pipeline().get(SslHandler.class); if (channel.isOpen()) { assert sslHandler != null : "channel [" + channel + "] did not have a ssl handler. 
pipeline " + channel.pipeline(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java index 5315a944f778d..fd1b1198607d1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.transport.nio; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.recycler.Recycler; @@ -131,9 +130,8 @@ public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) @Override public NioTcpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { - NioTcpServerChannel nioChannel = new NioTcpServerChannel(profileName, channel); - Consumer exceptionHandler = (e) -> logger.error(() -> - new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); + NioTcpServerChannel nioChannel = new NioTcpServerChannel(profileName, channel);; + Consumer exceptionHandler = (e) -> onServerException(nioChannel, e); Consumer acceptor = SecurityNioTransport.this::acceptChannel; ServerChannelContext context = new ServerChannelContext(nioChannel, this, selector, acceptor, exceptionHandler); nioChannel.setContext(context); From 00283a61e1dbcd6990f0ed7d369261a31fc89d8c Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 20 Jun 2018 16:26:26 -0700 Subject: [PATCH 54/92] Remove unused generic type for client execute method (#31444) This commit removes the request builder generic type for AbstractClient as it was unused. 
--- .../index/reindex/AsyncBulkByScrollActionTests.java | 6 ++---- .../java/org/elasticsearch/client/FilterClient.java | 6 ++---- .../client/ParentTaskAssigningClient.java | 7 ++----- .../org/elasticsearch/client/node/NodeClient.java | 7 ++----- .../elasticsearch/client/support/AbstractClient.java | 7 ++++--- .../client/transport/TransportClient.java | 4 ++-- .../transport/RemoteClusterAwareClient.java | 4 +--- .../client/ParentTaskAssigningClientTests.java | 8 ++------ .../org/elasticsearch/test/client/NoOpClient.java | 7 ++----- .../org/elasticsearch/xpack/core/ClientHelper.java | 12 +++++------- .../TransportSamlInvalidateSessionActionTests.java | 5 +---- .../audit/index/IndexAuditTrailMutedTests.java | 6 ++---- .../authc/esnative/NativeUsersStoreTests.java | 11 ++--------- .../security/support/SecurityIndexManagerTests.java | 8 ++------ 14 files changed, 31 insertions(+), 67 deletions(-) diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 9d22b90ee7f5b..6a5610de37a01 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; @@ -743,9 +742,8 @@ private class MyMockClient extends FilterClient { @Override @SuppressWarnings("unchecked") - protected > void doExecute( - Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { if (false == expectedHeaders.equals(threadPool().getThreadContext().getHeaders())) { listener.onFailure( new RuntimeException("Expected " + expectedHeaders + " but got " + threadPool().getThreadContext().getHeaders())); diff --git a/server/src/main/java/org/elasticsearch/client/FilterClient.java b/server/src/main/java/org/elasticsearch/client/FilterClient.java index bfccabac58043..b4230710414be 100644 --- a/server/src/main/java/org/elasticsearch/client/FilterClient.java +++ b/server/src/main/java/org/elasticsearch/client/FilterClient.java @@ -21,13 +21,11 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; - /** * A {@link Client} that contains another {@link Client} which it * uses as its basic source, possibly transforming the requests / responses along the @@ -62,8 +60,8 @@ public void close() { } @Override - protected > void doExecute( - Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { in().execute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java 
b/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java index a0934ba633dd5..27de6619053b3 100644 --- a/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java +++ b/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.tasks.Task; @@ -58,10 +57,8 @@ public Client unwrap() { } @Override - protected < Request extends ActionRequest, - Response extends ActionResponse, - RequestBuilder extends ActionRequestBuilder - > void doExecute(Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { request.setParentTask(parentTask); super.doExecute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java index 9e50fa56fab60..0ad863c936741 100644 --- a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java +++ b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.Client; @@ -67,10 +66,8 @@ public void close() { } @Override - public < Request extends ActionRequest, - Response extends ActionResponse, - RequestBuilder extends ActionRequestBuilder - > void doExecute(Action action, Request request, ActionListener listener) { + public + void doExecute(Action action, Request request, ActionListener listener) { // Discard the task because the Client interface doesn't use it. 
executeLocally(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 12db219f8ec78..31e5e3190079d 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainAction; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainRequest; @@ -401,7 +400,8 @@ public final vo doExecute(action, request, listener); } - protected abstract > void doExecute(Action action, Request request, ActionListener listener); + protected abstract + void doExecute(Action action, Request request, ActionListener listener); @Override public ActionFuture index(final IndexRequest request) { @@ -1764,7 +1764,8 @@ public void getSettings(GetSettingsRequest request, ActionListener headers) { return new FilterClient(this) { @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { ThreadContext threadContext = threadPool().getThreadContext(); try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(headers)) { super.doExecute(action, request, listener); diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 53f6dea21c7d1..ba18105e3f1ca 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.cluster.ClusterModule; @@ -377,7 +376,8 @@ public void close() { } @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { proxy.execute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java index aca8cdccaddb8..d93bbb57201e2 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.support.AbstractClient; @@ -43,8 +42,7 @@ final class RemoteClusterAwareClient extends AbstractClient { } 
@Override - protected > + protected void doExecute(Action action, Request request, ActionListener listener) { remoteClusterService.ensureConnected(clusterAlias, ActionListener.wrap(res -> { Transport.Connection connection = remoteClusterService.getConnection(clusterAlias); diff --git a/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java b/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java index bff713a225482..eb1338ad78816 100644 --- a/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java +++ b/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.search.ClearScrollRequest; @@ -38,11 +37,8 @@ public void testSetsParentId() { // This mock will do nothing but verify that parentTaskId is set on all requests sent to it. NoOpClient mock = new NoOpClient(getTestName()) { @Override - protected < Request extends ActionRequest, - Response extends ActionResponse, - RequestBuilder extends ActionRequestBuilder - > void doExecute(Action action, Request request, - ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { assertEquals(parentTaskId[0], request.getParentTask()); super.doExecute(action, request, listener); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java index 8ef08a259821a..d95e1d32663e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.common.settings.Settings; @@ -51,10 +50,8 @@ public NoOpClient(String testName) { } @Override - protected > - void doExecute(Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { listener.onResponse(null); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index ff3091bde93b9..0657eb013972a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -138,10 +138,9 @@ public static T executeWithHeaders(Map> void executeWithHeadersAsync( - Map headers, String origin, Client client, Action action, Request request, - ActionListener listener) { + public static + void executeWithHeadersAsync(Map headers, String origin, Client client, Action action, Request request, + ActionListener listener) { Map filteredHeaders = headers.entrySet().stream().filter(e -> SECURITY_HEADER_FILTERS.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, 
Map.Entry::getValue)); @@ -176,9 +175,8 @@ private ClientWithOrigin(Client in, String origin) { } @Override - protected > void doExecute( - Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { final Supplier supplier = in().threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = in().threadPool().getThreadContext().stashContext()) { in().threadPool().getThreadContext().putTransient(ACTION_ORIGIN_TRANSIENT_NAME, origin); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 85d1d4a161d1d..b46d307866284 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; @@ -123,9 +122,7 @@ public void setup() throws Exception { searchRequests = new ArrayList<>(); final Client client = new NoOpClient(threadPool) { @Override - protected > + protected void doExecute(Action action, Request request, ActionListener listener) { if (IndexAction.NAME.equals(action.name())) { assertThat(request, instanceOf(IndexRequest.class)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java index c17134093c593..9bc5c989d1f9c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; @@ -70,9 +69,8 @@ class IClient extends FilterClient { } @Override - protected > void doExecute( - Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { clientCalled.set(true); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index 3d739d57f480c..9fbcaa493dd96 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ -8,7 +8,6 @@ import 
org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; @@ -72,14 +71,8 @@ public void setupMocks() { client = new FilterClient(mockClient) { @Override - protected < - Request extends ActionRequest, - Response extends ActionResponse, - RequestBuilder extends ActionRequestBuilder - > void doExecute( - Action action, - Request request, - ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { requests.add(new Tuple<>(request, listener)); } }; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 928c9bbd1b143..7d10198c6aea8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.client.Client; @@ -80,11 +79,8 @@ public void setUpManager() { actions = new LinkedHashMap<>(); final Client client = new FilterClient(mockClient) { @Override - protected > - void doExecute(Action action, Request request, - ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { final Map> map = actions.getOrDefault(action, new HashMap<>()); map.put(request, listener); actions.put(action, map); From 86423f9563dd41cbf2a15b16962e2878ffe98185 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 20 Jun 2018 19:50:14 -0600 Subject: [PATCH 55/92] Ensure local addresses aren't null (#31440) Currently we set local addresses on the creation time of a NioChannel. However, this may return null as the local address may not have been set yet. An example is the local address has not been set on a client channel as the connection process is not yet complete. This PR modifies the getter to set the local field if it is currently null. 
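As a hedged illustration of the lazy-initialization pattern applied below (LazyAddressChannel is a simplified stand-in, not the real NioChannel hierarchy): instead of reading the local address in the constructor, where it can still be null for a client channel whose connect has not completed, the getter resolves it on first access and caches it in a volatile field.

import java.net.InetSocketAddress;
import java.nio.channels.SocketChannel;

// Simplified stand-in for a channel wrapper; only the address handling is shown.
class LazyAddressChannel {
    private final SocketChannel socketChannel;
    private volatile InetSocketAddress localAddress; // null until the socket is bound or connected

    LazyAddressChannel(SocketChannel socketChannel) {
        this.socketChannel = socketChannel;
        // Deliberately no getLocalSocketAddress() call here: at construction time a
        // client channel may not be connected yet, so the address could still be null.
    }

    InetSocketAddress getLocalAddress() {
        if (localAddress == null) {
            // Resolve lazily; once the socket is bound the value is stable, so the
            // benign race where two threads both resolve it is harmless.
            localAddress = (InetSocketAddress) socketChannel.socket().getLocalSocketAddress();
        }
        return localAddress;
    }
}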
--- .../org/elasticsearch/nio/ChannelFactory.java | 6 +++++ .../org/elasticsearch/nio/NioChannel.java | 13 ++-------- .../nio/NioServerSocketChannel.java | 25 ++++++++++++++----- .../elasticsearch/nio/NioSocketChannel.java | 21 +++++++++++++--- .../elasticsearch/nio/EventHandlerTests.java | 5 +++- .../http/nio/NioHttpChannel.java | 3 +-- .../transport/nio/NioTcpChannel.java | 2 +- .../transport/nio/NioTcpServerChannel.java | 4 +-- .../transport/nio/MockNioTransport.java | 8 +++--- 9 files changed, 54 insertions(+), 33 deletions(-) diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/ChannelFactory.java b/libs/nio/src/main/java/org/elasticsearch/nio/ChannelFactory.java index 77443d948d9a6..f0dc3e567fef6 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/ChannelFactory.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/ChannelFactory.java @@ -21,6 +21,7 @@ import java.io.Closeable; import java.io.IOException; +import java.io.UncheckedIOException; import java.net.InetSocketAddress; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; @@ -99,6 +100,11 @@ private Socket internalCreateChannel(NioSelector selector, SocketChannel rawChan Socket channel = createChannel(selector, rawChannel); assert channel.getContext() != null : "channel context should have been set on channel"; return channel; + } catch (UncheckedIOException e) { + // This can happen if getRemoteAddress throws IOException. + IOException cause = e.getCause(); + closeRawChannel(rawChannel, cause); + throw cause; } catch (Exception e) { closeRawChannel(rawChannel, e); throw e; diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/NioChannel.java b/libs/nio/src/main/java/org/elasticsearch/nio/NioChannel.java index 2cc2bd260f0b2..55038fabcef8e 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/NioChannel.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/NioChannel.java @@ -19,7 +19,6 @@ package org.elasticsearch.nio; -import java.io.IOException; import java.net.InetSocketAddress; import java.nio.channels.NetworkChannel; import java.util.function.BiConsumer; @@ -32,20 +31,10 @@ */ public abstract class NioChannel { - private final InetSocketAddress localAddress; - - NioChannel(NetworkChannel socketChannel) throws IOException { - this.localAddress = (InetSocketAddress) socketChannel.getLocalAddress(); - } - public boolean isOpen() { return getContext().isOpen(); } - public InetSocketAddress getLocalAddress() { - return localAddress; - } - /** * Adds a close listener to the channel. Multiple close listeners can be added. There is no guarantee * about the order in which close listeners will be executed. 
If the channel is already closed, the @@ -64,6 +53,8 @@ public void close() { getContext().closeChannel(); } + public abstract InetSocketAddress getLocalAddress(); + public abstract NetworkChannel getRawChannel(); public abstract ChannelContext getContext(); diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/NioServerSocketChannel.java b/libs/nio/src/main/java/org/elasticsearch/nio/NioServerSocketChannel.java index 9f78c3b1b319d..a335e6925881a 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/NioServerSocketChannel.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/NioServerSocketChannel.java @@ -19,19 +19,20 @@ package org.elasticsearch.nio; -import java.io.IOException; +import java.net.InetSocketAddress; import java.nio.channels.ServerSocketChannel; import java.util.concurrent.atomic.AtomicBoolean; public class NioServerSocketChannel extends NioChannel { - private final ServerSocketChannel socketChannel; + private final ServerSocketChannel serverSocketChannel; private final AtomicBoolean contextSet = new AtomicBoolean(false); + private volatile InetSocketAddress localAddress; private ServerChannelContext context; - public NioServerSocketChannel(ServerSocketChannel socketChannel) throws IOException { - super(socketChannel); - this.socketChannel = socketChannel; + public NioServerSocketChannel(ServerSocketChannel serverSocketChannel) { + this.serverSocketChannel = serverSocketChannel; + attemptToSetLocalAddress(); } /** @@ -48,9 +49,15 @@ public void setContext(ServerChannelContext context) { } } + @Override + public InetSocketAddress getLocalAddress() { + attemptToSetLocalAddress(); + return localAddress; + } + @Override public ServerSocketChannel getRawChannel() { - return socketChannel; + return serverSocketChannel; } @Override @@ -64,4 +71,10 @@ public String toString() { "localAddress=" + getLocalAddress() + '}'; } + + private void attemptToSetLocalAddress() { + if (localAddress == null) { + localAddress = (InetSocketAddress) serverSocketChannel.socket().getLocalSocketAddress(); + } + } } diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/NioSocketChannel.java b/libs/nio/src/main/java/org/elasticsearch/nio/NioSocketChannel.java index 32e934766913e..c7d44990837cd 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/NioSocketChannel.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/NioSocketChannel.java @@ -20,6 +20,7 @@ package org.elasticsearch.nio; import java.io.IOException; +import java.io.UncheckedIOException; import java.net.InetSocketAddress; import java.nio.channels.SocketChannel; import java.util.concurrent.atomic.AtomicBoolean; @@ -27,15 +28,19 @@ public class NioSocketChannel extends NioChannel { - private final InetSocketAddress remoteAddress; private final AtomicBoolean contextSet = new AtomicBoolean(false); private final SocketChannel socketChannel; + private final InetSocketAddress remoteAddress; + private volatile InetSocketAddress localAddress; private SocketChannelContext context; - public NioSocketChannel(SocketChannel socketChannel) throws IOException { - super(socketChannel); + public NioSocketChannel(SocketChannel socketChannel) { this.socketChannel = socketChannel; - this.remoteAddress = (InetSocketAddress) socketChannel.getRemoteAddress(); + try { + this.remoteAddress = (InetSocketAddress) socketChannel.getRemoteAddress(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } public void setContext(SocketChannelContext context) { @@ -46,6 +51,14 @@ public void setContext(SocketChannelContext context) { } } + 
@Override + public InetSocketAddress getLocalAddress() { + if (localAddress == null) { + localAddress = (InetSocketAddress) socketChannel.socket().getLocalSocketAddress(); + } + return localAddress; + } + @Override public SocketChannel getRawChannel() { return socketChannel; diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java index a9e1836199e25..0cc3aa048008a 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java @@ -23,6 +23,7 @@ import org.junit.Before; import java.io.IOException; +import java.net.ServerSocket; import java.nio.channels.CancelledKeyException; import java.nio.channels.SelectionKey; import java.nio.channels.ServerSocketChannel; @@ -69,7 +70,9 @@ public void setUpHandler() throws IOException { channel.setContext(context); handler.handleRegistration(context); - NioServerSocketChannel serverChannel = new NioServerSocketChannel(mock(ServerSocketChannel.class)); + ServerSocketChannel serverSocketChannel = mock(ServerSocketChannel.class); + when(serverSocketChannel.socket()).thenReturn(mock(ServerSocket.class)); + NioServerSocketChannel serverChannel = new NioServerSocketChannel(serverSocketChannel); serverContext = new DoNotRegisterServerContext(serverChannel, mock(NioSelector.class), mock(Consumer.class)); serverChannel.setContext(serverContext); diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java index 255faab5ddad0..0a797a5687ec7 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java @@ -24,12 +24,11 @@ import org.elasticsearch.http.HttpResponse; import org.elasticsearch.nio.NioSocketChannel; -import java.io.IOException; import java.nio.channels.SocketChannel; public class NioHttpChannel extends NioSocketChannel implements HttpChannel { - NioHttpChannel(SocketChannel socketChannel) throws IOException { + NioHttpChannel(SocketChannel socketChannel) { super(socketChannel); } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java index d700ad567bc19..947a255b178c8 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java @@ -32,7 +32,7 @@ public class NioTcpChannel extends NioSocketChannel implements TcpChannel { private final String profile; - public NioTcpChannel(String profile, SocketChannel socketChannel) throws IOException { + public NioTcpChannel(String profile, SocketChannel socketChannel) { super(socketChannel); this.profile = profile; } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java index 3c6d4b12df943..0d4b00f14b461 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java @@ -23,7 +23,6 @@ import org.elasticsearch.nio.NioServerSocketChannel; import 
org.elasticsearch.transport.TcpServerChannel; -import java.io.IOException; import java.nio.channels.ServerSocketChannel; /** @@ -34,12 +33,11 @@ public class NioTcpServerChannel extends NioServerSocketChannel implements TcpSe private final String profile; - public NioTcpServerChannel(String profile, ServerSocketChannel socketChannel) throws IOException { + public NioTcpServerChannel(String profile, ServerSocketChannel socketChannel) { super(socketChannel); this.profile = profile; } - @Override public void close() { getContext().closeChannel(); } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java index 2ab8719c33422..3eca4818c4ab1 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java @@ -165,7 +165,7 @@ public MockSocketChannel createChannel(NioSelector selector, SocketChannel chann @Override public MockServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { - MockServerChannel nioServerChannel = new MockServerChannel(profileName, channel, this, selector); + MockServerChannel nioServerChannel = new MockServerChannel(profileName, channel); Consumer exceptionHandler = (e) -> logger.error(() -> new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); ServerChannelContext context = new ServerChannelContext(nioServerChannel, this, selector, MockNioTransport.this::acceptChannel, @@ -196,8 +196,7 @@ private static class MockServerChannel extends NioServerSocketChannel implements private final String profile; - MockServerChannel(String profile, ServerSocketChannel channel, ChannelFactory channelFactory, NioSelector selector) - throws IOException { + MockServerChannel(String profile, ServerSocketChannel channel) { super(channel); this.profile = profile; } @@ -222,8 +221,7 @@ private static class MockSocketChannel extends NioSocketChannel implements TcpCh private final String profile; - private MockSocketChannel(String profile, java.nio.channels.SocketChannel socketChannel, NioSelector selector) - throws IOException { + private MockSocketChannel(String profile, java.nio.channels.SocketChannel socketChannel, NioSelector selector) { super(socketChannel); this.profile = profile; } From da69ab28c70bb2817da1efcdd45a14781f741157 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 21 Jun 2018 16:00:26 +0200 Subject: [PATCH 56/92] Return transport addresses from UnicastHostsProvider (#31426) With #20695 we removed local transport and there is just TransportAddress now. The UnicastHostsProvider currently returns DiscoveryNode instances, where, during pinging, we're actually only making use of the TransportAddress to establish a first connection to the possible new node. To simplify the interface, we can just return a list of transport addresses instead, which means that it's not necessary anymore to create fake node objects in each plugin just to return the address information. 
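For illustration, a minimal sketch of the simplified contract the diffs below move toward (HostsProvider and StaticHostsProvider are hypothetical stand-ins, using plain InetSocketAddress in place of TransportAddress): a provider now just resolves and returns the addresses to ping, with no need to wrap each one in a placeholder DiscoveryNode.

import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.List;

// Hypothetical stand-in for the hosts-provider contract after the change.
interface HostsProvider {
    List<InetSocketAddress> buildDynamicHosts(); // addresses only, no synthetic node objects
}

class StaticHostsProvider implements HostsProvider {
    @Override
    public List<InetSocketAddress> buildDynamicHosts() {
        // Previously each entry would have been wrapped in a fake "#cloud-..." node;
        // returning the raw addresses is enough for the initial ping.
        return Arrays.asList(
            new InetSocketAddress("10.0.0.1", 9300),
            new InetSocketAddress("10.0.0.2", 9300));
    }
}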
--- .../classic/AzureUnicastHostsProvider.java | 26 +++---- .../ec2/AwsEc2UnicastHostsProvider.java | 33 ++++---- .../discovery/ec2/Ec2DiscoveryTests.java | 75 +++++++++---------- .../file/FileBasedUnicastHostsProvider.java | 14 ++-- .../FileBasedUnicastHostsProviderTests.java | 43 +++++------ .../gce/GceUnicastHostsProvider.java | 25 +++---- .../discovery/gce/GceDiscoveryTests.java | 53 +++++++------ .../discovery/zen/UnicastHostsProvider.java | 4 +- .../discovery/zen/UnicastZenPing.java | 75 ++++++++----------- .../single/SingleNodeDiscoveryIT.java | 2 +- .../discovery/zen/UnicastZenPingTests.java | 41 +++++----- .../discovery/MockUncasedHostProvider.java | 4 +- 12 files changed, 175 insertions(+), 220 deletions(-) diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java index 2bc6cc4b130cd..482dafb008fc5 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java @@ -24,12 +24,10 @@ import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; import com.microsoft.windowsazure.management.compute.models.InstanceEndpoint; import com.microsoft.windowsazure.management.compute.models.RoleInstance; -import org.elasticsearch.Version; import org.elasticsearch.cloud.azure.classic.AzureServiceDisableException; import org.elasticsearch.cloud.azure.classic.AzureServiceRemoteException; import org.elasticsearch.cloud.azure.classic.management.AzureComputeService; import org.elasticsearch.cloud.azure.classic.management.AzureComputeService.Discovery; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.network.InetAddresses; @@ -47,9 +45,6 @@ import java.util.ArrayList; import java.util.List; -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; - public class AzureUnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider { public enum HostType { @@ -104,7 +99,7 @@ public static Deployment fromString(String string) { private final TimeValue refreshInterval; private long lastRefresh; - private List cachedDiscoNodes; + private List dynamicHosts; private final HostType hostType; private final String publicEndpointName; private final String deploymentName; @@ -137,30 +132,30 @@ public AzureUnicastHostsProvider(Settings settings, AzureComputeService azureCom * Setting `cloud.azure.refresh_interval` to `0` will disable caching (default). 
*/ @Override - public List buildDynamicNodes() { + public List buildDynamicHosts() { if (refreshInterval.millis() != 0) { - if (cachedDiscoNodes != null && + if (dynamicHosts != null && (refreshInterval.millis() < 0 || (System.currentTimeMillis() - lastRefresh) < refreshInterval.millis())) { logger.trace("using cache to retrieve node list"); - return cachedDiscoNodes; + return dynamicHosts; } lastRefresh = System.currentTimeMillis(); } logger.debug("start building nodes list using Azure API"); - cachedDiscoNodes = new ArrayList<>(); + dynamicHosts = new ArrayList<>(); HostedServiceGetDetailedResponse detailed; try { detailed = azureComputeService.getServiceDetails(); } catch (AzureServiceDisableException e) { logger.debug("Azure discovery service has been disabled. Returning empty list of nodes."); - return cachedDiscoNodes; + return dynamicHosts; } catch (AzureServiceRemoteException e) { // We got a remote exception logger.warn("can not get list of azure nodes: [{}]. Returning empty list of nodes.", e.getMessage()); logger.trace("AzureServiceRemoteException caught", e); - return cachedDiscoNodes; + return dynamicHosts; } InetAddress ipAddress = null; @@ -212,8 +207,7 @@ public List buildDynamicNodes() { TransportAddress[] addresses = transportService.addressesFromString(networkAddress, 1); for (TransportAddress address : addresses) { logger.trace("adding {}, transport_address {}", networkAddress, address); - cachedDiscoNodes.add(new DiscoveryNode("#cloud-" + instance.getInstanceName(), address, emptyMap(), - emptySet(), Version.CURRENT.minimumCompatibilityVersion())); + dynamicHosts.add(address); } } catch (Exception e) { logger.warn("can not convert [{}] to transport address. skipping. [{}]", networkAddress, e.getMessage()); @@ -221,9 +215,9 @@ public List buildDynamicNodes() { } } - logger.debug("{} node(s) added", cachedDiscoNodes.size()); + logger.debug("{} addresses added", dynamicHosts.size()); - return cachedDiscoNodes; + return dynamicHosts; } protected String resolveInstanceAddress(final HostType hostType, final RoleInstance instance) { diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index 2c536981b04c5..396e9f707d404 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -29,8 +29,6 @@ import com.amazonaws.services.ec2.model.Tag; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -46,8 +44,6 @@ import java.util.Set; import static java.util.Collections.disjoint; -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; import static org.elasticsearch.discovery.ec2.AwsEc2Service.HostType.TAG_PREFIX; import static org.elasticsearch.discovery.ec2.AwsEc2Service.HostType.PRIVATE_DNS; import static org.elasticsearch.discovery.ec2.AwsEc2Service.HostType.PRIVATE_IP; @@ -70,7 +66,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos private final String hostType; - private final 
DiscoNodesCache discoNodes; + private final TransportAddressesCache dynamicHosts; AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AwsEc2Service awsEc2Service) { super(settings); @@ -78,7 +74,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos this.awsEc2Service = awsEc2Service; this.hostType = AwsEc2Service.HOST_TYPE_SETTING.get(settings); - this.discoNodes = new DiscoNodesCache(AwsEc2Service.NODE_CACHE_TIME_SETTING.get(settings)); + this.dynamicHosts = new TransportAddressesCache(AwsEc2Service.NODE_CACHE_TIME_SETTING.get(settings)); this.bindAnyGroup = AwsEc2Service.ANY_GROUP_SETTING.get(settings); this.groups = new HashSet<>(); @@ -96,13 +92,13 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos } @Override - public List buildDynamicNodes() { - return discoNodes.getOrRefresh(); + public List buildDynamicHosts() { + return dynamicHosts.getOrRefresh(); } - protected List fetchDynamicNodes() { + protected List fetchDynamicNodes() { - final List discoNodes = new ArrayList<>(); + final List dynamicHosts = new ArrayList<>(); final DescribeInstancesResult descInstances; try (AmazonEc2Reference clientReference = awsEc2Service.client()) { @@ -115,7 +111,7 @@ protected List fetchDynamicNodes() { } catch (final AmazonClientException e) { logger.info("Exception while retrieving instance list from AWS API: {}", e.getMessage()); logger.debug("Full exception:", e); - return discoNodes; + return dynamicHosts; } logger.trace("building dynamic unicast discovery nodes..."); @@ -179,8 +175,7 @@ && disjoint(securityGroupIds, groups)) { final TransportAddress[] addresses = transportService.addressesFromString(address, 1); for (int i = 0; i < addresses.length; i++) { logger.trace("adding {}, address {}, transport_address {}", instance.getInstanceId(), address, addresses[i]); - discoNodes.add(new DiscoveryNode(instance.getInstanceId(), "#cloud-" + instance.getInstanceId() + "-" + i, - addresses[i], emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion())); + dynamicHosts.add(addresses[i]); } } catch (final Exception e) { final String finalAddress = address; @@ -194,9 +189,9 @@ && disjoint(securityGroupIds, groups)) { } } - logger.debug("using dynamic discovery nodes {}", discoNodes); + logger.debug("using dynamic transport addresses {}", dynamicHosts); - return discoNodes; + return dynamicHosts; } private DescribeInstancesRequest buildDescribeInstancesRequest() { @@ -222,11 +217,11 @@ private DescribeInstancesRequest buildDescribeInstancesRequest() { return describeInstancesRequest; } - private final class DiscoNodesCache extends SingleObjectCache> { + private final class TransportAddressesCache extends SingleObjectCache> { private boolean empty = true; - protected DiscoNodesCache(TimeValue refreshInterval) { + protected TransportAddressesCache(TimeValue refreshInterval) { super(refreshInterval, new ArrayList<>()); } @@ -236,8 +231,8 @@ protected boolean needsRefresh() { } @Override - protected List refresh() { - final List nodes = fetchDynamicNodes(); + protected List refresh() { + final List nodes = fetchDynamicNodes(); empty = nodes.isEmpty(); return nodes; } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index 43cc924fadb10..9dc2e02edc1b5 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ 
b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -21,7 +21,6 @@ import com.amazonaws.services.ec2.model.Tag; import org.elasticsearch.Version; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -87,16 +86,16 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi null); } - protected List buildDynamicNodes(Settings nodeSettings, int nodes) { - return buildDynamicNodes(nodeSettings, nodes, null); + protected List buildDynamicHosts(Settings nodeSettings, int nodes) { + return buildDynamicHosts(nodeSettings, nodes, null); } - protected List buildDynamicNodes(Settings nodeSettings, int nodes, List> tagsList) { + protected List buildDynamicHosts(Settings nodeSettings, int nodes, List> tagsList) { try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY, nodes, tagsList)) { AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, plugin.ec2Service); - List discoveryNodes = provider.buildDynamicNodes(); - logger.debug("--> nodes found: {}", discoveryNodes); - return discoveryNodes; + List dynamicHosts = provider.buildDynamicHosts(); + logger.debug("--> addresses found: {}", dynamicHosts); + return dynamicHosts; } catch (IOException e) { fail("Unexpected IOException"); return null; @@ -107,7 +106,7 @@ public void testDefaultSettings() throws InterruptedException { int nodes = randomInt(10); Settings nodeSettings = Settings.builder() .build(); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes); + List discoveryNodes = buildDynamicHosts(nodeSettings, nodes); assertThat(discoveryNodes, hasSize(nodes)); } @@ -119,12 +118,11 @@ public void testPrivateIp() throws InterruptedException { Settings nodeSettings = Settings.builder() .put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "private_ip") .build(); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes); - assertThat(discoveryNodes, hasSize(nodes)); + List transportAddresses = buildDynamicHosts(nodeSettings, nodes); + assertThat(transportAddresses, hasSize(nodes)); // We check that we are using here expected address int node = 1; - for (DiscoveryNode discoveryNode : discoveryNodes) { - TransportAddress address = discoveryNode.getAddress(); + for (TransportAddress address : transportAddresses) { TransportAddress expected = poorMansDNS.get(AmazonEC2Mock.PREFIX_PRIVATE_IP + node++); assertEquals(address, expected); } @@ -138,12 +136,11 @@ public void testPublicIp() throws InterruptedException { Settings nodeSettings = Settings.builder() .put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "public_ip") .build(); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes); - assertThat(discoveryNodes, hasSize(nodes)); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes); + assertThat(dynamicHosts, hasSize(nodes)); // We check that we are using here expected address int node = 1; - for (DiscoveryNode discoveryNode : discoveryNodes) { - TransportAddress address = discoveryNode.getAddress(); + for (TransportAddress address : dynamicHosts) { TransportAddress expected = poorMansDNS.get(AmazonEC2Mock.PREFIX_PUBLIC_IP + node++); assertEquals(address, expected); } @@ -159,13 +156,12 @@ public void testPrivateDns() throws InterruptedException { Settings nodeSettings = Settings.builder() 
.put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "private_dns") .build(); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes); - assertThat(discoveryNodes, hasSize(nodes)); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes); + assertThat(dynamicHosts, hasSize(nodes)); // We check that we are using here expected address int node = 1; - for (DiscoveryNode discoveryNode : discoveryNodes) { + for (TransportAddress address : dynamicHosts) { String instanceId = "node" + node++; - TransportAddress address = discoveryNode.getAddress(); TransportAddress expected = poorMansDNS.get( AmazonEC2Mock.PREFIX_PRIVATE_DNS + instanceId + AmazonEC2Mock.SUFFIX_PRIVATE_DNS); assertEquals(address, expected); @@ -182,13 +178,12 @@ public void testPublicDns() throws InterruptedException { Settings nodeSettings = Settings.builder() .put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "public_dns") .build(); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes); - assertThat(discoveryNodes, hasSize(nodes)); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes); + assertThat(dynamicHosts, hasSize(nodes)); // We check that we are using here expected address int node = 1; - for (DiscoveryNode discoveryNode : discoveryNodes) { + for (TransportAddress address : dynamicHosts) { String instanceId = "node" + node++; - TransportAddress address = discoveryNode.getAddress(); TransportAddress expected = poorMansDNS.get( AmazonEC2Mock.PREFIX_PUBLIC_DNS + instanceId + AmazonEC2Mock.SUFFIX_PUBLIC_DNS); assertEquals(address, expected); @@ -201,7 +196,7 @@ public void testInvalidHostType() throws InterruptedException { .build(); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { - buildDynamicNodes(nodeSettings, 1); + buildDynamicHosts(nodeSettings, 1); }); assertThat(exception.getMessage(), containsString("does_not_exist is unknown for discovery.ec2.host_type")); } @@ -227,8 +222,8 @@ public void testFilterByTags() throws InterruptedException { } logger.info("started [{}] instances with [{}] stage=prod tag", nodes, prodInstances); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes, tagsList); - assertThat(discoveryNodes, hasSize(prodInstances)); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes, tagsList); + assertThat(dynamicHosts, hasSize(prodInstances)); } public void testFilterByMultipleTags() throws InterruptedException { @@ -258,8 +253,8 @@ public void testFilterByMultipleTags() throws InterruptedException { } logger.info("started [{}] instances with [{}] stage=prod tag", nodes, prodInstances); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes, tagsList); - assertThat(discoveryNodes, hasSize(prodInstances)); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes, tagsList); + assertThat(dynamicHosts, hasSize(prodInstances)); } public void testReadHostFromTag() throws InterruptedException, UnknownHostException { @@ -285,11 +280,11 @@ public void testReadHostFromTag() throws InterruptedException, UnknownHostExcept } logger.info("started [{}] instances", nodes); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes, tagsList); - assertThat(discoveryNodes, hasSize(nodes)); - for (DiscoveryNode discoveryNode : discoveryNodes) { - TransportAddress address = discoveryNode.getAddress(); - TransportAddress expected = poorMansDNS.get(discoveryNode.getName()); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes, tagsList); + assertThat(dynamicHosts, hasSize(nodes)); + int node = 1; + for (TransportAddress address : 
dynamicHosts) { + TransportAddress expected = poorMansDNS.get("node" + node++); assertEquals(address, expected); } } @@ -306,13 +301,13 @@ public void testGetNodeListEmptyCache() throws Exception { AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(Settings.EMPTY, 1, null); DummyEc2HostProvider provider = new DummyEc2HostProvider(Settings.EMPTY, transportService, awsEc2Service) { @Override - protected List fetchDynamicNodes() { + protected List fetchDynamicNodes() { fetchCount++; return new ArrayList<>(); } }; for (int i=0; i<3; i++) { - provider.buildDynamicNodes(); + provider.buildDynamicHosts(); } assertThat(provider.fetchCount, is(3)); } @@ -323,18 +318,18 @@ public void testGetNodeListCached() throws Exception { try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) { DummyEc2HostProvider provider = new DummyEc2HostProvider(builder.build(), transportService, plugin.ec2Service) { @Override - protected List fetchDynamicNodes() { + protected List fetchDynamicNodes() { fetchCount++; - return Ec2DiscoveryTests.this.buildDynamicNodes(Settings.EMPTY, 1); + return Ec2DiscoveryTests.this.buildDynamicHosts(Settings.EMPTY, 1); } }; for (int i=0; i<3; i++) { - provider.buildDynamicNodes(); + provider.buildDynamicHosts(); } assertThat(provider.fetchCount, is(1)); Thread.sleep(1_000L); // wait for cache to expire for (int i=0; i<3; i++) { - provider.buildDynamicNodes(); + provider.buildDynamicHosts(); } assertThat(provider.fetchCount, is(2)); } diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java index 1029f907a660a..7abcb4454720c 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java @@ -21,8 +21,8 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.env.Environment; @@ -58,7 +58,6 @@ class FileBasedUnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider { static final String UNICAST_HOSTS_FILE = "unicast_hosts.txt"; - static final String UNICAST_HOST_PREFIX = "#zen_file_unicast_host_"; private final TransportService transportService; private final ExecutorService executorService; @@ -76,7 +75,7 @@ class FileBasedUnicastHostsProvider extends AbstractComponent implements Unicast } @Override - public List buildDynamicNodes() { + public List buildDynamicHosts() { List hostsList; try (Stream lines = Files.lines(unicastHostsFilePath)) { hostsList = lines.filter(line -> line.startsWith("#") == false) // lines starting with `#` are comments @@ -91,23 +90,22 @@ public List buildDynamicNodes() { hostsList = Collections.emptyList(); } - final List discoNodes = new ArrayList<>(); + final List dynamicHosts = new ArrayList<>(); try { - discoNodes.addAll(resolveHostsLists( + dynamicHosts.addAll(resolveHostsLists( executorService, logger, hostsList, 1, transportService, - UNICAST_HOST_PREFIX, resolveTimeout)); } catch (InterruptedException e) { throw new 
RuntimeException(e); } - logger.debug("[discovery-file] Using dynamic discovery nodes {}", discoNodes); + logger.debug("[discovery-file] Using dynamic discovery nodes {}", dynamicHosts); - return discoNodes; + return dynamicHosts; } } diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java index 3ddd15a7b4cf3..860d3537635d5 100644 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java +++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.discovery.file; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -50,7 +49,6 @@ import java.util.concurrent.Executors; import static org.elasticsearch.discovery.file.FileBasedUnicastHostsProvider.UNICAST_HOSTS_FILE; -import static org.elasticsearch.discovery.file.FileBasedUnicastHostsProvider.UNICAST_HOST_PREFIX; /** * Tests for {@link FileBasedUnicastHostsProvider}. @@ -104,23 +102,20 @@ public BoundTransportAddress boundAddress() { public void testBuildDynamicNodes() throws Exception { final List hostEntries = Arrays.asList("#comment, should be ignored", "192.168.0.1", "192.168.0.2:9305", "255.255.23.15"); - final List nodes = setupAndRunHostProvider(hostEntries); + final List nodes = setupAndRunHostProvider(hostEntries); assertEquals(hostEntries.size() - 1, nodes.size()); // minus 1 because we are ignoring the first line that's a comment - assertEquals("192.168.0.1", nodes.get(0).getAddress().getAddress()); - assertEquals(9300, nodes.get(0).getAddress().getPort()); - assertEquals(UNICAST_HOST_PREFIX + "192.168.0.1_0#", nodes.get(0).getId()); - assertEquals("192.168.0.2", nodes.get(1).getAddress().getAddress()); - assertEquals(9305, nodes.get(1).getAddress().getPort()); - assertEquals(UNICAST_HOST_PREFIX + "192.168.0.2:9305_0#", nodes.get(1).getId()); - assertEquals("255.255.23.15", nodes.get(2).getAddress().getAddress()); - assertEquals(9300, nodes.get(2).getAddress().getPort()); - assertEquals(UNICAST_HOST_PREFIX + "255.255.23.15_0#", nodes.get(2).getId()); + assertEquals("192.168.0.1", nodes.get(0).getAddress()); + assertEquals(9300, nodes.get(0).getPort()); + assertEquals("192.168.0.2", nodes.get(1).getAddress()); + assertEquals(9305, nodes.get(1).getPort()); + assertEquals("255.255.23.15", nodes.get(2).getAddress()); + assertEquals(9300, nodes.get(2).getPort()); } public void testEmptyUnicastHostsFile() throws Exception { final List hostEntries = Collections.emptyList(); - final List nodes = setupAndRunHostProvider(hostEntries); - assertEquals(0, nodes.size()); + final List addresses = setupAndRunHostProvider(hostEntries); + assertEquals(0, addresses.size()); } public void testUnicastHostsDoesNotExist() throws Exception { @@ -129,27 +124,27 @@ public void testUnicastHostsDoesNotExist() throws Exception { .build(); final Environment environment = TestEnvironment.newEnvironment(settings); final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(environment, transportService, executorService); - final List nodes = provider.buildDynamicNodes(); - assertEquals(0, nodes.size()); + final List addresses = 
provider.buildDynamicHosts(); + assertEquals(0, addresses.size()); } public void testInvalidHostEntries() throws Exception { List hostEntries = Arrays.asList("192.168.0.1:9300:9300"); - List nodes = setupAndRunHostProvider(hostEntries); - assertEquals(0, nodes.size()); + List addresses = setupAndRunHostProvider(hostEntries); + assertEquals(0, addresses.size()); } public void testSomeInvalidHostEntries() throws Exception { List hostEntries = Arrays.asList("192.168.0.1:9300:9300", "192.168.0.1:9301"); - List nodes = setupAndRunHostProvider(hostEntries); - assertEquals(1, nodes.size()); // only one of the two is valid and will be used - assertEquals("192.168.0.1", nodes.get(0).getAddress().getAddress()); - assertEquals(9301, nodes.get(0).getAddress().getPort()); + List addresses = setupAndRunHostProvider(hostEntries); + assertEquals(1, addresses.size()); // only one of the two is valid and will be used + assertEquals("192.168.0.1", addresses.get(0).getAddress()); + assertEquals(9301, addresses.get(0).getPort()); } // sets up the config dir, writes to the unicast hosts file in the config dir, // and then runs the file-based unicast host provider to get the list of discovery nodes - private List setupAndRunHostProvider(final List hostEntries) throws IOException { + private List setupAndRunHostProvider(final List hostEntries) throws IOException { final Path homeDir = createTempDir(); final Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), homeDir) @@ -168,6 +163,6 @@ private List setupAndRunHostProvider(final List hostEntri } return new FileBasedUnicastHostsProvider( - new Environment(settings, configPath), transportService, executorService).buildDynamicNodes(); + new Environment(settings, configPath), transportService, executorService).buildDynamicHosts(); } } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index de290245895d2..790d70a8b99b0 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -31,9 +31,7 @@ import com.google.api.services.compute.model.NetworkInterface; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.Version; import org.elasticsearch.cloud.gce.GceInstancesService; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.network.NetworkAddress; @@ -47,8 +45,6 @@ import org.elasticsearch.transport.TransportService; import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; public class GceUnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider { @@ -72,7 +68,7 @@ static final class Status { private final TimeValue refreshInterval; private long lastRefresh; - private List cachedDiscoNodes; + private List cachedDynamicHosts; public GceUnicastHostsProvider(Settings settings, GceInstancesService gceInstancesService, TransportService transportService, @@ -97,7 +93,7 @@ public GceUnicastHostsProvider(Settings settings, GceInstancesService gceInstanc * Information can be cached using `cloud.gce.refresh_interval` 
property if needed. */ @Override - public List buildDynamicNodes() { + public List buildDynamicHosts() { // We check that needed properties have been set if (this.project == null || this.project.isEmpty() || this.zones == null || this.zones.isEmpty()) { throw new IllegalArgumentException("one or more gce discovery settings are missing. " + @@ -106,16 +102,16 @@ public List buildDynamicNodes() { } if (refreshInterval.millis() != 0) { - if (cachedDiscoNodes != null && + if (cachedDynamicHosts != null && (refreshInterval.millis() < 0 || (System.currentTimeMillis() - lastRefresh) < refreshInterval.millis())) { if (logger.isTraceEnabled()) logger.trace("using cache to retrieve node list"); - return cachedDiscoNodes; + return cachedDynamicHosts; } lastRefresh = System.currentTimeMillis(); } logger.debug("start building nodes list using GCE API"); - cachedDiscoNodes = new ArrayList<>(); + cachedDynamicHosts = new ArrayList<>(); String ipAddress = null; try { InetAddress inetAddress = networkService.resolvePublishHostAddresses( @@ -133,7 +129,7 @@ public List buildDynamicNodes() { if (instances == null) { logger.trace("no instance found for project [{}], zones [{}].", this.project, this.zones); - return cachedDiscoNodes; + return cachedDynamicHosts; } for (Instance instance : instances) { @@ -238,8 +234,7 @@ public List buildDynamicNodes() { for (TransportAddress transportAddress : addresses) { logger.trace("adding {}, type {}, address {}, transport_address {}, status {}", name, type, ip_private, transportAddress, status); - cachedDiscoNodes.add(new DiscoveryNode("#cloud-" + name + "-" + 0, transportAddress, - emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion())); + cachedDynamicHosts.add(transportAddress); } } } catch (Exception e) { @@ -252,9 +247,9 @@ public List buildDynamicNodes() { logger.warn("exception caught during discovery", e); } - logger.debug("{} node(s) added", cachedDiscoNodes.size()); - logger.debug("using dynamic discovery nodes {}", cachedDiscoNodes); + logger.debug("{} addresses added", cachedDynamicHosts.size()); + logger.debug("using transport addresses {}", cachedDynamicHosts); - return cachedDiscoNodes; + return cachedDynamicHosts; } } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index 31ea9bdb1c21e..a1944a15d8036 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -21,9 +21,9 @@ import org.elasticsearch.Version; import org.elasticsearch.cloud.gce.GceInstancesServiceImpl; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; @@ -40,7 +40,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; /** * This test class uses a GCE HTTP Mock system which allows to simulate JSON Responses. 
@@ -105,13 +104,13 @@ public void stopGceComputeService() throws IOException { } } - protected List buildDynamicNodes(GceInstancesServiceImpl gceInstancesService, Settings nodeSettings) { + protected List buildDynamicNodes(GceInstancesServiceImpl gceInstancesService, Settings nodeSettings) { GceUnicastHostsProvider provider = new GceUnicastHostsProvider(nodeSettings, gceInstancesService, transportService, new NetworkService(Collections.emptyList())); - List discoveryNodes = provider.buildDynamicNodes(); - logger.info("--> nodes found: {}", discoveryNodes); - return discoveryNodes; + List dynamicHosts = provider.buildDynamicHosts(); + logger.info("--> addresses found: {}", dynamicHosts); + return dynamicHosts; } public void testNodesWithDifferentTagsAndNoTagSet() { @@ -120,8 +119,8 @@ public void testNodesWithDifferentTagsAndNoTagSet() { .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } public void testNodesWithDifferentTagsAndOneTagSet() { @@ -131,9 +130,8 @@ public void testNodesWithDifferentTagsAndOneTagSet() { .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(1)); - assertThat(discoveryNodes.get(0).getId(), is("#cloud-test2-0")); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(1)); } public void testNodesWithDifferentTagsAndTwoTagSet() { @@ -143,9 +141,8 @@ public void testNodesWithDifferentTagsAndTwoTagSet() { .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(1)); - assertThat(discoveryNodes.get(0).getId(), is("#cloud-test2-0")); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(1)); } public void testNodesWithSameTagsAndNoTagSet() { @@ -154,8 +151,8 @@ public void testNodesWithSameTagsAndNoTagSet() { .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } public void testNodesWithSameTagsAndOneTagSet() { @@ -165,8 +162,8 @@ public void testNodesWithSameTagsAndOneTagSet() { .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } public void testNodesWithSameTagsAndTwoTagsSet() { @@ -176,8 +173,8 @@ public void testNodesWithSameTagsAndTwoTagsSet() { .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); 
+ List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } public void testMultipleZonesAndTwoNodesInSameZone() { @@ -186,8 +183,8 @@ public void testMultipleZonesAndTwoNodesInSameZone() { .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } public void testMultipleZonesAndTwoNodesInDifferentZones() { @@ -196,8 +193,8 @@ public void testMultipleZonesAndTwoNodesInDifferentZones() { .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } /** @@ -209,8 +206,8 @@ public void testZeroNode43() { .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(0)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(0)); } public void testIllegalSettingsMissingAllRequired() { @@ -261,7 +258,7 @@ public void testNoRegionReturnsEmptyList() { .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(1)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(1)); } } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java index 9ff3215cd6480..d719f9d123b8c 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java @@ -19,7 +19,7 @@ package org.elasticsearch.discovery.zen; -import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.transport.TransportAddress; import java.util.List; @@ -31,5 +31,5 @@ public interface UnicastHostsProvider { /** * Builds the dynamic list of unicast hosts to be used for unicast discovery. 
*/ - List buildDynamicNodes(); + List buildDynamicHosts(); } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index e9ac1deec0ab4..cbadbb4a1e09b 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -118,9 +118,6 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { private final AtomicInteger pingingRoundIdGenerator = new AtomicInteger(); - // used as a node id prefix for configured unicast host nodes/address - private static final String UNICAST_NODE_PREFIX = "#zen_unicast_"; - private final Map activePingingRounds = newConcurrentMap(); // a list of temporal responses a node will return for a request (holds responses from other nodes) @@ -184,23 +181,20 @@ public UnicastZenPing(Settings settings, ThreadPool threadPool, TransportService * @param hosts the hosts to resolve * @param limitPortCounts the number of ports to resolve (should be 1 for non-local transport) * @param transportService the transport service - * @param nodeId_prefix a prefix to use for node ids * @param resolveTimeout the timeout before returning from hostname lookups - * @return a list of discovery nodes with resolved transport addresses + * @return a list of resolved transport addresses */ - public static List resolveHostsLists( + public static List resolveHostsLists( final ExecutorService executorService, final Logger logger, final List hosts, final int limitPortCounts, final TransportService transportService, - final String nodeId_prefix, final TimeValue resolveTimeout) throws InterruptedException { Objects.requireNonNull(executorService); Objects.requireNonNull(logger); Objects.requireNonNull(hosts); Objects.requireNonNull(transportService); - Objects.requireNonNull(nodeId_prefix); Objects.requireNonNull(resolveTimeout); if (resolveTimeout.nanos() < 0) { throw new IllegalArgumentException("resolve timeout must be non-negative but was [" + resolveTimeout + "]"); @@ -213,7 +207,7 @@ public static List resolveHostsLists( .collect(Collectors.toList()); final List> futures = executorService.invokeAll(callables, resolveTimeout.nanos(), TimeUnit.NANOSECONDS); - final List discoveryNodes = new ArrayList<>(); + final List transportAddresses = new ArrayList<>(); final Set localAddresses = new HashSet<>(); localAddresses.add(transportService.boundAddress().publishAddress()); localAddresses.addAll(Arrays.asList(transportService.boundAddress().boundAddresses())); @@ -231,13 +225,7 @@ public static List resolveHostsLists( final TransportAddress address = addresses[addressId]; // no point in pinging ourselves if (localAddresses.contains(address) == false) { - discoveryNodes.add( - new DiscoveryNode( - nodeId_prefix + hostname + "_" + addressId + "#", - address, - emptyMap(), - emptySet(), - Version.CURRENT.minimumCompatibilityVersion())); + transportAddresses.add(address); } } } catch (final ExecutionException e) { @@ -249,7 +237,7 @@ public static List resolveHostsLists( logger.warn("timed out after [{}] resolving host [{}]", resolveTimeout, hostname); } } - return discoveryNodes; + return Collections.unmodifiableList(transportAddresses); } @Override @@ -292,29 +280,28 @@ public void ping(final Consumer resultsConsumer, final TimeValue protected void ping(final Consumer resultsConsumer, final TimeValue scheduleDuration, final TimeValue requestDuration) { - final List seedNodes; + 
final List seedAddresses = new ArrayList<>(); try { - seedNodes = resolveHostsLists( + seedAddresses.addAll(resolveHostsLists( unicastZenPingExecutorService, logger, configuredHosts, limitPortCounts, transportService, - UNICAST_NODE_PREFIX, - resolveTimeout); + resolveTimeout)); } catch (InterruptedException e) { throw new RuntimeException(e); } - seedNodes.addAll(hostsProvider.buildDynamicNodes()); + seedAddresses.addAll(hostsProvider.buildDynamicHosts()); final DiscoveryNodes nodes = contextProvider.clusterState().nodes(); // add all possible master nodes that were active in the last known cluster configuration for (ObjectCursor masterNode : nodes.getMasterNodes().values()) { - seedNodes.add(masterNode.value); + seedAddresses.add(masterNode.value.getAddress()); } final ConnectionProfile connectionProfile = ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, requestDuration, requestDuration); - final PingingRound pingingRound = new PingingRound(pingingRoundIdGenerator.incrementAndGet(), seedNodes, resultsConsumer, + final PingingRound pingingRound = new PingingRound(pingingRoundIdGenerator.incrementAndGet(), seedAddresses, resultsConsumer, nodes.getLocalNode(), connectionProfile); activePingingRounds.put(pingingRound.id(), pingingRound); final AbstractRunnable pingSender = new AbstractRunnable() { @@ -356,17 +343,17 @@ protected class PingingRound implements Releasable { private final Map tempConnections = new HashMap<>(); private final KeyedLock connectionLock = new KeyedLock<>(true); private final PingCollection pingCollection; - private final List seedNodes; + private final List seedAddresses; private final Consumer pingListener; private final DiscoveryNode localNode; private final ConnectionProfile connectionProfile; private AtomicBoolean closed = new AtomicBoolean(false); - PingingRound(int id, List seedNodes, Consumer resultsConsumer, DiscoveryNode localNode, + PingingRound(int id, List seedAddresses, Consumer resultsConsumer, DiscoveryNode localNode, ConnectionProfile connectionProfile) { this.id = id; - this.seedNodes = Collections.unmodifiableList(new ArrayList<>(seedNodes)); + this.seedAddresses = Collections.unmodifiableList(seedAddresses.stream().distinct().collect(Collectors.toList())); this.pingListener = resultsConsumer; this.localNode = localNode; this.connectionProfile = connectionProfile; @@ -381,9 +368,9 @@ public boolean isClosed() { return this.closed.get(); } - public List getSeedNodes() { + public List getSeedAddresses() { ensureOpen(); - return seedNodes; + return seedAddresses; } public Connection getOrConnect(DiscoveryNode node) throws IOException { @@ -457,26 +444,28 @@ protected void sendPings(final TimeValue timeout, final PingingRound pingingRoun final ClusterState lastState = contextProvider.clusterState(); final UnicastPingRequest pingRequest = new UnicastPingRequest(pingingRound.id(), timeout, createPingResponse(lastState)); - Set nodesFromResponses = temporalResponses.stream().map(pingResponse -> { + List temporalAddresses = temporalResponses.stream().map(pingResponse -> { assert clusterName.equals(pingResponse.clusterName()) : "got a ping request from a different cluster. 
expected " + clusterName + " got " + pingResponse.clusterName(); - return pingResponse.node(); - }).collect(Collectors.toSet()); - - // dedup by address - final Map uniqueNodesByAddress = - Stream.concat(pingingRound.getSeedNodes().stream(), nodesFromResponses.stream()) - .collect(Collectors.toMap(DiscoveryNode::getAddress, Function.identity(), (n1, n2) -> n1)); + return pingResponse.node().getAddress(); + }).collect(Collectors.toList()); + final Stream uniqueAddresses = Stream.concat(pingingRound.getSeedAddresses().stream(), + temporalAddresses.stream()).distinct(); // resolve what we can via the latest cluster state - final Set nodesToPing = uniqueNodesByAddress.values().stream() - .map(node -> { - DiscoveryNode foundNode = lastState.nodes().findByAddress(node.getAddress()); - if (foundNode == null) { - return node; - } else { + final Set nodesToPing = uniqueAddresses + .map(address -> { + DiscoveryNode foundNode = lastState.nodes().findByAddress(address); + if (foundNode != null && transportService.nodeConnected(foundNode)) { return foundNode; + } else { + return new DiscoveryNode( + address.toString(), + address, + emptyMap(), + emptySet(), + Version.CURRENT.minimumCompatibilityVersion()); } }).collect(Collectors.toSet()); diff --git a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java index fdc36152cc895..33c87ea7f383e 100644 --- a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java @@ -84,7 +84,7 @@ public void testDoesNotRespondToZenPings() throws Exception { internalCluster().getInstance(TransportService.class); // try to ping the single node directly final UnicastHostsProvider provider = - () -> Collections.singletonList(nodeTransport.getLocalNode()); + () -> Collections.singletonList(nodeTransport.getLocalNode().getAddress()); final CountDownLatch latch = new CountDownLatch(1); final DiscoveryNodes nodes = DiscoveryNodes.builder() .add(nodeTransport.getLocalNode()) diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index f71ffe28b50f6..4aa75077431e7 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -408,19 +408,18 @@ public BoundTransportAddress boundAddress() { Collections.emptySet()); closeables.push(transportService); final int limitPortCounts = randomIntBetween(1, 10); - final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = TestUnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList("127.0.0.1"), limitPortCounts, transportService, - "test_", TimeValue.timeValueSeconds(1)); - assertThat(discoveryNodes, hasSize(limitPortCounts)); + assertThat(transportAddresses, hasSize(limitPortCounts)); final Set ports = new HashSet<>(); - for (final DiscoveryNode discoveryNode : discoveryNodes) { - assertTrue(discoveryNode.getAddress().address().getAddress().isLoopbackAddress()); - ports.add(discoveryNode.getAddress().getPort()); + for (final TransportAddress address : transportAddresses) { + assertTrue(address.address().getAddress().isLoopbackAddress()); + ports.add(address.getPort()); } assertThat(ports, equalTo(IntStream.range(9300, 9300 + 
limitPortCounts).mapToObj(m -> m).collect(Collectors.toSet()))); } @@ -453,19 +452,18 @@ public BoundTransportAddress boundAddress() { new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = TestUnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList(NetworkAddress.format(loopbackAddress)), 10, transportService, - "test_", TimeValue.timeValueSeconds(1)); - assertThat(discoveryNodes, hasSize(7)); + assertThat(transportAddresses, hasSize(7)); final Set ports = new HashSet<>(); - for (final DiscoveryNode discoveryNode : discoveryNodes) { - assertTrue(discoveryNode.getAddress().address().getAddress().isLoopbackAddress()); - ports.add(discoveryNode.getAddress().getPort()); + for (final TransportAddress address : transportAddresses) { + assertTrue(address.address().getAddress().isLoopbackAddress()); + ports.add(address.getPort()); } assertThat(ports, equalTo(IntStream.range(9303, 9310).mapToObj(m -> m).collect(Collectors.toSet()))); } @@ -505,17 +503,16 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi Collections.emptySet()); closeables.push(transportService); - final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = TestUnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList(hostname), 1, transportService, - "test_", TimeValue.timeValueSeconds(1) ); - assertThat(discoveryNodes, empty()); + assertThat(transportAddresses, empty()); verify(logger).warn("failed to resolve host [" + hostname + "]", unknownHostException); } @@ -565,16 +562,15 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi closeables.push(transportService); final TimeValue resolveTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 3)); try { - final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = TestUnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("hostname1", "hostname2"), 1, transportService, - "test+", resolveTimeout); - assertThat(discoveryNodes, hasSize(1)); + assertThat(transportAddresses, hasSize(1)); verify(logger).trace( "resolved host [{}] to {}", "hostname1", new TransportAddress[]{new TransportAddress(TransportAddress.META_ADDRESS, 9300)}); @@ -732,17 +728,16 @@ public BoundTransportAddress boundAddress() { new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = TestUnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("127.0.0.1:9300:9300", "127.0.0.1:9301"), 1, transportService, - "test_", TimeValue.timeValueSeconds(1)); - assertThat(discoveryNodes, hasSize(1)); // only one of the two is valid and will be used - assertThat(discoveryNodes.get(0).getAddress().getAddress(), equalTo("127.0.0.1")); - assertThat(discoveryNodes.get(0).getAddress().getPort(), equalTo(9301)); + assertThat(transportAddresses, hasSize(1)); // only one of the two is valid and will be used + assertThat(transportAddresses.get(0).getAddress(), equalTo("127.0.0.1")); + assertThat(transportAddresses.get(0).getPort(), equalTo(9301)); 
verify(logger).warn(eq("failed to resolve host [127.0.0.1:9300:9300]"), Matchers.any(ExecutionException.class)); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java index 46bbdcc7646c4..2e60a3c518dd3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.discovery.zen.UnicastHostsProvider; @@ -55,7 +56,7 @@ public MockUncasedHostProvider(Supplier localNodeSupplier, Cluste } @Override - public List buildDynamicNodes() { + public List buildDynamicHosts() { final DiscoveryNode localNode = getNode(); assert localNode != null; synchronized (activeNodesPerCluster) { @@ -64,6 +65,7 @@ public List buildDynamicNodes() { .map(MockUncasedHostProvider::getNode) .filter(Objects::nonNull) .filter(n -> localNode.equals(n) == false) + .map(DiscoveryNode::getAddress) .collect(Collectors.toList()); } } From 0a324b9943758bb976f93c99031ce517507348f0 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 21 Jun 2018 07:59:55 -0700 Subject: [PATCH 57/92] Core: Convert TransportAction.execute uses to client calls (#31487) This commit converts some of the existing calls to TransportAction.execute to use the equivalent client method for the desired action. --- .../TransportMultiSearchTemplateAction.java | 10 ++-- .../TransportSearchTemplateAction.java | 14 +++--- .../indices/create/CreateIndexResponse.java | 3 +- .../upgrade/post/TransportUpgradeAction.java | 10 ++-- .../action/bulk/TransportBulkAction.java | 19 +++---- .../ingest/PutPipelineTransportAction.java | 10 ++-- .../search/TransportMultiSearchAction.java | 16 +++--- .../action/update/TransportUpdateAction.java | 22 ++++---- .../tasks/TaskResultsService.java | 9 +--- .../bulk/TransportBulkActionTookTests.java | 50 ++++++------------- .../search/MultiSearchActionTookTests.java | 16 +++--- .../TransportMultiSearchActionTests.java | 12 ++--- .../action/TransportGraphExploreAction.java | 12 ++--- 13 files changed, 83 insertions(+), 120 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java index 3e80b0f247883..7451c89cdb494 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -23,9 +23,9 @@ import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.TransportMultiSearchAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; 
import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -42,16 +42,16 @@ public class TransportMultiSearchTemplateAction extends HandledTransportAction { + client.multiSearch(multiSearchRequest, ActionListener.wrap(r -> { for (int i = 0; i < r.getResponses().length; i++) { MultiSearchResponse.Item item = r.getResponses()[i]; int originalSlot = originalSlots.get(i); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index a910ec384ee12..c241678cc5f44 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -22,9 +22,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -50,20 +50,18 @@ public class TransportSearchTemplateAction extends HandledTransportAction) SearchTemplateRequest::new); this.scriptService = scriptService; - this.searchAction = searchAction; this.xContentRegistry = xContentRegistry; + this.client = client; } @Override @@ -72,7 +70,7 @@ protected void doExecute(SearchTemplateRequest request, ActionListener() { + client.search(searchRequest, new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { try { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java index 4e3a5685bda13..c858d0bb10651 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java @@ -55,8 +55,7 @@ protected static void declareFields(Constructing private String index; - protected CreateIndexResponse() { - } + public CreateIndexResponse() {} protected CreateIndexResponse(boolean acknowledged, boolean shardsAcknowledged, String index) { super(acknowledged, shardsAcknowledged); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index 67e51c8e5575c..0bc2134cb505a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import 
org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -58,16 +59,15 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction { private final IndicesService indicesService; - - private final TransportUpgradeSettingsAction upgradeSettingsAction; + private final NodeClient client; @Inject public TransportUpgradeAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, TransportUpgradeSettingsAction upgradeSettingsAction) { + IndexNameExpressionResolver indexNameExpressionResolver, NodeClient client) { super(settings, UpgradeAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, UpgradeRequest::new, ThreadPool.Names.FORCE_MERGE); this.indicesService = indicesService; - this.upgradeSettingsAction = upgradeSettingsAction; + this.client = client; } @Override @@ -205,7 +205,7 @@ public void onFailure(Exception e) { private void updateSettings(final UpgradeResponse upgradeResponse, final ActionListener listener) { UpgradeSettingsRequest upgradeSettingsRequest = new UpgradeSettingsRequest(upgradeResponse.versions()); - upgradeSettingsAction.execute(upgradeSettingsRequest, new ActionListener() { + client.executeLocally(UpgradeSettingsAction.INSTANCE, upgradeSettingsRequest, new ActionListener() { @Override public void onResponse(UpgradeSettingsResponse updateSettingsResponse) { listener.onResponse(upgradeResponse); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 2fcf30b3ae8a9..153a7d8d45a7b 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.IngestActionForwarder; import org.elasticsearch.action.support.ActionFilters; @@ -38,6 +37,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.update.TransportUpdateAction; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -88,27 +88,24 @@ public class TransportBulkAction extends HandledTransportAction responses, int idx, DocWriteRequest request, String index, Exception e) { diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 7dde981804939..17af73c167704 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -23,9 +23,9 @@ import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; 
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -47,16 +47,16 @@ public class PutPipelineTransportAction extends TransportMasterNodeAction() { + client.admin().cluster().nodesInfo(nodesInfoRequest, new ActionListener() { @Override public void onResponse(NodesInfoResponse nodeInfos) { try { diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index 89367f71ef38b..ce35c1e94f83a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.service.ClusterService; @@ -43,27 +43,27 @@ public class TransportMultiSearchAction extends HandledTransportAction searchAction; private final LongSupplier relativeTimeProvider; + private final NodeClient client; @Inject public TransportMultiSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, TransportSearchAction searchAction, ActionFilters actionFilters) { + ClusterService clusterService, ActionFilters actionFilters, NodeClient client) { super(settings, MultiSearchAction.NAME, threadPool, transportService, actionFilters, MultiSearchRequest::new); this.clusterService = clusterService; - this.searchAction = searchAction; this.availableProcessors = EsExecutors.numberOfProcessors(settings); this.relativeTimeProvider = System::nanoTime; + this.client = client; } TransportMultiSearchAction(ThreadPool threadPool, ActionFilters actionFilters, TransportService transportService, - ClusterService clusterService, TransportAction searchAction, - int availableProcessors, LongSupplier relativeTimeProvider) { + ClusterService clusterService, int availableProcessors, + LongSupplier relativeTimeProvider, NodeClient client) { super(Settings.EMPTY, MultiSearchAction.NAME, threadPool, transportService, actionFilters, MultiSearchRequest::new); this.clusterService = clusterService; - this.searchAction = searchAction; this.availableProcessors = availableProcessors; this.relativeTimeProvider = relativeTimeProvider; + this.client = client; } @Override @@ -141,7 +141,7 @@ void executeSearch( * when we handle the response rather than going recursive, we fork to another thread, otherwise we recurse. 
*/ final Thread thread = Thread.currentThread(); - searchAction.execute(request.request, new ActionListener() { + client.search(request.request, new ActionListener() { @Override public void onResponse(final SearchResponse searchResponse) { handleResponse(request.responseSlot, new MultiSearchResponse.Item(searchResponse, null)); diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 91911129dfac7..9faf22d464cbb 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -24,8 +24,6 @@ import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; -import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; @@ -34,6 +32,7 @@ import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.instance.TransportInstanceSingleOperationAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; @@ -66,22 +65,21 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationAction { - private final TransportBulkAction bulkAction; private final AutoCreateIndex autoCreateIndex; - private final TransportCreateIndexAction createIndexAction; private final UpdateHelper updateHelper; private final IndicesService indicesService; + private final NodeClient client; @Inject public TransportUpdateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, - TransportBulkAction bulkAction, TransportCreateIndexAction createIndexAction, UpdateHelper updateHelper, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService, AutoCreateIndex autoCreateIndex) { + UpdateHelper updateHelper, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService, + AutoCreateIndex autoCreateIndex, NodeClient client) { super(settings, UpdateAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, UpdateRequest::new); - this.bulkAction = bulkAction; - this.createIndexAction = createIndexAction; this.updateHelper = updateHelper; this.indicesService = indicesService; this.autoCreateIndex = autoCreateIndex; + this.client = client; } @Override @@ -116,7 +114,7 @@ public static void resolveAndValidateRouting(MetaData metaData, String concreteI protected void doExecute(final UpdateRequest request, final ActionListener listener) { // if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) { - createIndexAction.execute(new CreateIndexRequest().index(request.index()).cause("auto(update 
api)").masterNodeTimeout(request.timeout()), new ActionListener() { + client.admin().indices().create(new CreateIndexRequest().index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener() { @Override public void onResponse(CreateIndexResponse result) { innerExecute(request, listener); @@ -177,7 +175,7 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< IndexRequest upsertRequest = result.action(); // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request final BytesReference upsertSourceBytes = upsertRequest.source(); - bulkAction.execute(toSingleItemBulkRequest(upsertRequest), wrapBulkResponse( + client.bulk(toSingleItemBulkRequest(upsertRequest), wrapBulkResponse( ActionListener.wrap(response -> { UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult()); if (request.fetchSource() != null && request.fetchSource().fetchSource()) { @@ -197,7 +195,7 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< IndexRequest indexRequest = result.action(); // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request final BytesReference indexSourceBytes = indexRequest.source(); - bulkAction.execute(toSingleItemBulkRequest(indexRequest), wrapBulkResponse( + client.bulk(toSingleItemBulkRequest(indexRequest), wrapBulkResponse( ActionListener.wrap(response -> { UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult()); update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes)); @@ -208,7 +206,7 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< break; case DELETED: DeleteRequest deleteRequest = result.action(); - bulkAction.execute(toSingleItemBulkRequest(deleteRequest), wrapBulkResponse( + client.bulk(toSingleItemBulkRequest(deleteRequest), wrapBulkResponse( ActionListener.wrap(response -> { UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult()); update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null)); diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java index 6ec949a0c918b..b94902132fea2 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import 
org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; @@ -69,15 +68,11 @@ public class TaskResultsService extends AbstractComponent { private final ClusterService clusterService; - private final TransportCreateIndexAction createIndexAction; - @Inject - public TaskResultsService(Settings settings, Client client, ClusterService clusterService, - TransportCreateIndexAction createIndexAction) { + public TaskResultsService(Settings settings, Client client, ClusterService clusterService) { super(settings); this.client = client; this.clusterService = clusterService; - this.createIndexAction = createIndexAction; } public void storeResult(TaskResult taskResult, ActionListener listener) { @@ -91,7 +86,7 @@ public void storeResult(TaskResult taskResult, ActionListener listener) { createIndexRequest.mapping(TASK_TYPE, taskResultIndexMapping(), XContentType.JSON); createIndexRequest.cause("auto(task api)"); - createIndexAction.execute(null, createIndexRequest, new ActionListener() { + client.admin().indices().create(createIndexRequest, new ActionListener() { @Override public void onResponse(CreateIndexResponse result) { doStoreResult(taskResult, listener); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index af8289f0c45b1..9d5193180299d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -21,16 +21,17 @@ package org.elasticsearch.action.bulk; import org.apache.lucene.util.Constants; +import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -99,14 +100,13 @@ private TransportBulkAction createAction(boolean controlled, AtomicLong expected IndexNameExpressionResolver resolver = new Resolver(Settings.EMPTY); ActionFilters actionFilters = new ActionFilters(new HashSet<>()); - TransportCreateIndexAction createIndexAction = new TransportCreateIndexAction( - Settings.EMPTY, - transportService, - clusterService, - threadPool, - null, - actionFilters, - resolver); + NodeClient client = new NodeClient(Settings.EMPTY, threadPool) { + @Override + public + void doExecute(Action action, Request request, ActionListener listener) { + listener.onResponse((Response)new CreateIndexResponse()); + } + }; if (controlled) { @@ -116,7 +116,7 @@ private TransportBulkAction createAction(boolean controlled, AtomicLong expected transportService, clusterService, null, - createIndexAction, + client, actionFilters, 
resolver, null, @@ -141,7 +141,7 @@ void executeBulk( transportService, clusterService, null, - createIndexAction, + client, actionFilters, resolver, null, @@ -223,7 +223,7 @@ static class TestTransportBulkAction extends TransportBulkAction { TransportService transportService, ClusterService clusterService, TransportShardBulkAction shardBulkAction, - TransportCreateIndexAction createIndexAction, + NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, AutoCreateIndex autoCreateIndex, @@ -235,7 +235,7 @@ static class TestTransportBulkAction extends TransportBulkAction { clusterService, null, shardBulkAction, - createIndexAction, + client, actionFilters, indexNameExpressionResolver, autoCreateIndex, @@ -253,24 +253,4 @@ boolean shouldAutoCreate(String index, ClusterState state) { } } - - static class TestTransportCreateIndexAction extends TransportCreateIndexAction { - - TestTransportCreateIndexAction( - Settings settings, - TransportService transportService, - ClusterService clusterService, - ThreadPool threadPool, - MetaDataCreateIndexService createIndexService, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, transportService, clusterService, threadPool, createIndexService, actionFilters, indexNameExpressionResolver); - } - - @Override - protected void doExecute(Task task, CreateIndexRequest request, ActionListener listener) { - listener.onResponse(newResponse()); - } - } - } diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index 39e9ec805e070..94bc6b01ec168 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -148,10 +148,9 @@ public TaskManager getTaskManager() { final ExecutorService commonExecutor = threadPool.executor(threadPoolNames.get(0)); final Set requests = Collections.newSetFromMap(Collections.synchronizedMap(new IdentityHashMap<>())); - TransportAction searchAction = new TransportAction(Settings.EMPTY, - "action", threadPool, actionFilters, taskManager) { + NodeClient client = new NodeClient(settings, threadPool) { @Override - protected void doExecute(SearchRequest request, ActionListener listener) { + public void search(final SearchRequest request, final ActionListener listener) { requests.add(request); commonExecutor.execute(() -> { counter.decrementAndGet(); @@ -161,8 +160,8 @@ protected void doExecute(SearchRequest request, ActionListener l }; if (controlledClock) { - return new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, - availableProcessors, expected::get) { + return new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, availableProcessors, + expected::get, client) { @Override void executeSearch(final Queue requests, final AtomicArray responses, final 
AtomicInteger responseCounter, final ActionListener listener, long startTimeInNanos) { @@ -171,9 +170,8 @@ void executeSearch(final Queue requests, final AtomicArray requests, final AtomicArray responses, final AtomicInteger responseCounter, final ActionListener listener, long startTimeInNanos) { diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java index 26d5cf2cc14be..a43584a4130e4 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionTestUtils; -import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -107,15 +107,14 @@ public TaskManager getTaskManager() { final ExecutorService commonExecutor = threadPool.executor(threadPoolNames.get(0)); final ExecutorService rarelyExecutor = threadPool.executor(threadPoolNames.get(1)); final Set requests = Collections.newSetFromMap(Collections.synchronizedMap(new IdentityHashMap<>())); - TransportAction searchAction = new TransportAction - (Settings.EMPTY, "action", threadPool, actionFilters, taskManager) { + NodeClient client = new NodeClient(settings, threadPool) { @Override - protected void doExecute(SearchRequest request, ActionListener listener) { + public void search(final SearchRequest request, final ActionListener listener) { requests.add(request); int currentConcurrentSearches = counter.incrementAndGet(); if (currentConcurrentSearches > maxAllowedConcurrentSearches) { errorHolder.set(new AssertionError("Current concurrent search [" + currentConcurrentSearches + - "] is higher than is allowed [" + maxAllowedConcurrentSearches + "]")); + "] is higher than is allowed [" + maxAllowedConcurrentSearches + "]")); } final ExecutorService executorService = rarely() ? 
rarelyExecutor : commonExecutor; executorService.execute(() -> { @@ -126,8 +125,7 @@ protected void doExecute(SearchRequest request, ActionListener l }; TransportMultiSearchAction action = - new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, 10, - System::nanoTime); + new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, 10, System::nanoTime, client); // Execute the multi search api and fail if we find an error after executing: try { diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 07035967d2abf..1c1dfb476da7d 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -13,9 +13,9 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -65,7 +65,7 @@ */ public class TransportGraphExploreAction extends HandledTransportAction { - private final TransportSearchAction searchAction; + private final NodeClient client; protected final XPackLicenseState licenseState; static class VertexPriorityQueue extends PriorityQueue { @@ -82,12 +82,12 @@ protected boolean lessThan(Vertex a, Vertex b) { } @Inject - public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, TransportSearchAction transportSearchAction, + public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, NodeClient client, TransportService transportService, ActionFilters actionFilters, XPackLicenseState licenseState) { super(settings, GraphExploreAction.NAME, threadPool, transportService, actionFilters, (Supplier)GraphExploreRequest::new); - this.searchAction = transportSearchAction; + this.client = client; this.licenseState = licenseState; } @@ -313,7 +313,7 @@ synchronized void expand() { // System.out.println(source); logger.trace("executing expansion graph search request"); - searchAction.execute(searchRequest, new ActionListener() { + client.search(searchRequest, new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { // System.out.println(searchResponse); @@ -660,7 +660,7 @@ public synchronized void start() { searchRequest.source(source); // System.out.println(source); logger.trace("executing initial graph search request"); - searchAction.execute(searchRequest, new ActionListener() { + client.search(searchRequest, new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { addShardFailures(searchResponse.getShardFailures()); From 68ec9588737d97c2a9282308fce10ac6f3cf03c7 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 21 Jun 2018 08:19:23 -0700 Subject: [PATCH 58/92] [DOCS] Move migration APIs to docs (#31473) --- .../reference/migration/apis}/assistance.asciidoc | 1 + 
.../reference/migration/apis}/deprecation.asciidoc | 1 + .../reference/migration/apis}/upgrade.asciidoc | 1 + .../reference/migration}/migration.asciidoc | 7 ++++--- docs/reference/rest-api/index.asciidoc | 2 +- 5 files changed, 8 insertions(+), 4 deletions(-) rename {x-pack/docs/en/rest-api/migration => docs/reference/migration/apis}/assistance.asciidoc (99%) rename {x-pack/docs/en/rest-api/migration => docs/reference/migration/apis}/deprecation.asciidoc (99%) rename {x-pack/docs/en/rest-api/migration => docs/reference/migration/apis}/upgrade.asciidoc (99%) rename {x-pack/docs/en/rest-api => docs/reference/migration}/migration.asciidoc (64%) diff --git a/x-pack/docs/en/rest-api/migration/assistance.asciidoc b/docs/reference/migration/apis/assistance.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/migration/assistance.asciidoc rename to docs/reference/migration/apis/assistance.asciidoc index 1af625a97ecff..ae9972cc062bc 100644 --- a/x-pack/docs/en/rest-api/migration/assistance.asciidoc +++ b/docs/reference/migration/apis/assistance.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[migration-api-assistance]] === Migration Assistance API diff --git a/x-pack/docs/en/rest-api/migration/deprecation.asciidoc b/docs/reference/migration/apis/deprecation.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/migration/deprecation.asciidoc rename to docs/reference/migration/apis/deprecation.asciidoc index 54feee7903af8..a1f0517b82757 100644 --- a/x-pack/docs/en/rest-api/migration/deprecation.asciidoc +++ b/docs/reference/migration/apis/deprecation.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[migration-api-deprecation]] === Deprecation Info APIs diff --git a/x-pack/docs/en/rest-api/migration/upgrade.asciidoc b/docs/reference/migration/apis/upgrade.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/migration/upgrade.asciidoc rename to docs/reference/migration/apis/upgrade.asciidoc index 839a0057e82fe..39a5638cce111 100644 --- a/x-pack/docs/en/rest-api/migration/upgrade.asciidoc +++ b/docs/reference/migration/apis/upgrade.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[migration-api-upgrade]] === Migration Upgrade API diff --git a/x-pack/docs/en/rest-api/migration.asciidoc b/docs/reference/migration/migration.asciidoc similarity index 64% rename from x-pack/docs/en/rest-api/migration.asciidoc rename to docs/reference/migration/migration.asciidoc index 51f1e5fae0f65..a54da21ab1409 100644 --- a/x-pack/docs/en/rest-api/migration.asciidoc +++ b/docs/reference/migration/migration.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[migration-api]] == Migration APIs @@ -8,6 +9,6 @@ The migration APIs simplify upgrading {xpack} indices from one version to anothe * <> * <> -include::migration/assistance.asciidoc[] -include::migration/upgrade.asciidoc[] -include::migration/deprecation.asciidoc[] +include::apis/assistance.asciidoc[] +include::apis/upgrade.asciidoc[] +include::apis/deprecation.asciidoc[] diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index e44eea9aa53f4..9ec57940dd299 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -21,7 +21,7 @@ directly to configure and access {xpack} features. 
include::info.asciidoc[] include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] -include::{xes-repo-dir}/rest-api/migration.asciidoc[] +include::{es-repo-dir}/migration/migration.asciidoc[] include::{xes-repo-dir}/rest-api/ml-api.asciidoc[] include::{xes-repo-dir}/rest-api/rollup-api.asciidoc[] include::{xes-repo-dir}/rest-api/security.asciidoc[] From 872418ff9499011bf6ee8da473fc2c49bbc67a43 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Wed, 20 Jun 2018 21:40:13 +0300 Subject: [PATCH 59/92] [DOCS] Significantly improve SQL docs Introduce SQL commands Move reserved keywords into an appendix Add section on security Introduce concepts section --- x-pack/docs/en/sql/appendix/index.asciidoc | 1 + .../syntax-reserved.asciidoc} | 5 +- x-pack/docs/en/sql/concepts.asciidoc | 63 ++++ x-pack/docs/en/sql/endpoints/cli.asciidoc | 16 - x-pack/docs/en/sql/endpoints/jdbc.asciidoc | 29 +- x-pack/docs/en/sql/endpoints/rest.asciidoc | 14 - .../docs/en/sql/endpoints/translate.asciidoc | 16 - x-pack/docs/en/sql/functions/index.asciidoc | 19 ++ x-pack/docs/en/sql/index.asciidoc | 14 +- .../docs/en/sql/language/data-types.asciidoc | 4 +- x-pack/docs/en/sql/language/index.asciidoc | 9 +- x-pack/docs/en/sql/language/syntax.asciidoc | 123 -------- .../language/syntax/describe-table.asciidoc | 20 ++ .../en/sql/language/syntax/index.asciidoc | 16 + .../en/sql/language/syntax/select.asciidoc | 284 ++++++++++++++++++ .../sql/language/syntax/show-columns.asciidoc | 12 + .../language/syntax/show-functions.asciidoc | 14 + .../sql/language/syntax/show-tables.asciidoc | 14 + x-pack/docs/en/sql/security.asciidoc | 37 +++ 19 files changed, 506 insertions(+), 204 deletions(-) create mode 100644 x-pack/docs/en/sql/appendix/index.asciidoc rename x-pack/docs/en/sql/{language/reserved.asciidoc => appendix/syntax-reserved.asciidoc} (98%) create mode 100644 x-pack/docs/en/sql/concepts.asciidoc delete mode 100644 x-pack/docs/en/sql/language/syntax.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/describe-table.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/index.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/select.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/show-columns.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/show-functions.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/show-tables.asciidoc create mode 100644 x-pack/docs/en/sql/security.asciidoc diff --git a/x-pack/docs/en/sql/appendix/index.asciidoc b/x-pack/docs/en/sql/appendix/index.asciidoc new file mode 100644 index 0000000000000..b00176a8a3f67 --- /dev/null +++ b/x-pack/docs/en/sql/appendix/index.asciidoc @@ -0,0 +1 @@ +include::syntax-reserved.asciidoc[] \ No newline at end of file diff --git a/x-pack/docs/en/sql/language/reserved.asciidoc b/x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc similarity index 98% rename from x-pack/docs/en/sql/language/reserved.asciidoc rename to x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc index 8dc62e90a9eb1..bbdefcbcb54aa 100644 --- a/x-pack/docs/en/sql/language/reserved.asciidoc +++ b/x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc @@ -1,5 +1,6 @@ -[[sql-spec-reserved]] -=== Reserved Keywords +[appendix] +[[sql-syntax-reserved]] += Reserved Keywords Table with reserved keywords that need to be quoted. Also provide an example to make it more obvious. 
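Since the reserved-keywords appendix above notes that an example would make the quoting rule more obvious, here is a minimal illustrative sketch. The index name `select` is hypothetical, chosen only because it collides with a keyword, and the double-quote escaping mirrors the convention the new SELECT page in this patch uses for table names containing special characters.

[source, sql]
----
-- "select" is a hypothetical index whose name collides with a reserved keyword;
-- double-quoting the identifier lets it be queried anyway
SELECT * FROM "select"
----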
diff --git a/x-pack/docs/en/sql/concepts.asciidoc b/x-pack/docs/en/sql/concepts.asciidoc new file mode 100644 index 0000000000000..f5eab6f37baf8 --- /dev/null +++ b/x-pack/docs/en/sql/concepts.asciidoc @@ -0,0 +1,63 @@ +[[sql-concepts]] +== Conventions and Terminology + +For clarity, it is important to establish the meaning behind certain words as, the same wording might convey different meanings to different readers depending on one's familiarity with SQL versus {es}. + +NOTE: This documentation while trying to be complete, does assume the reader has _basic_ understanding of {es} and/or SQL. If that is not the case, please continue reading the documentation however take notes and pursue the topics that are unclear either through the main {es} documentation or through the plethora of SQL material available in the open (there are simply too many excellent resources here to enumerate). + +As a general rule, {es-sql} as the name indicates provides a SQL interface to {es}. As such, it follows the SQL terminology and conventions first, whenever possible. However the backing engine itself is {es} for which {es-sql} was purposely created hence why features or concepts that are not available, or cannot be mapped correctly, in SQL appear +in {es-sql}. +Last but not least, {es-sql} tries to obey the https://en.wikipedia.org/wiki/Principle_of_least_astonishment[principle of least suprise], though as all things in the world, everything is relative. + +=== Mapping concepts across SQL and {es} + +While SQL and {es} have different terms for the way the data is organized (and different semantics), essentially their purpose is the same. + +So let's start from the bottom; these roughly are: + +[cols="1,1,5", options="header"] +|=== +|SQL +|{es} +|Description + +|`column` +|`field` +|In both cases, at the lowest level, data is stored in in _named_ entries, of a variety of <>, containing _one_ value. SQL calls such an entry a _column_ while {es} a _field_. +Notice that in {es} a field can contain _multiple_ values of the same type (esentially a list) while in SQL, a _column_ can contain _exactly_ one value of said type. +{es-sql} will do its best to preserve the SQL semantic and, depending on the query, reject those that return fields with more than one value. + +|`row` +|`document` +|++Column++s and ++field++s do _not_ exist by themselves; they are part of a `row` or a `document`. The two have slightly different semantics: a `row` tends to be _strict_ (and have more enforcements) while a `document` tends to be a bit more flexible or loose (while still having a structure). + +|`table` +|`index` +|The target against which queries, whether in SQL or {es} get executed against. + +|`schema` +|_implicit_ +|In RDBMS, `schema` is mainly a namespace of tables and typically used as a security boundary. {es} does not provide an equivalent concept for it. However when security is enabled, {es} automatically applies the security enforcement so that a role sees only the data it is allowed to (in SQL jargon, its _schema_). + +|`catalog` or `database` +|`cluster` instance +|In SQL, `catalog` or `database` are used interchangebly and represent a set of schemas that is, a number of tables. +In {es} the set of indices available are grouped in a `cluster`. The semantics also differ a bit; a `database` is essentially yet another namespace (which can have some implications on the way data is stored) while an {es} `cluster` is a runtime instance, or rather a set of at least one {es} instance (typically running distributed). 
+In practice this means that while in SQL one can potentially have multiple catalogs inside an instance, in {es} one is restricted to only _one_. + +|`cluster` +|`cluster` (federated) +|Traditionally in SQL, _cluster_ refers to a single RDMBS instance which contains a number of ++catalog++s or ++database++s (see above). The same word can be reused inside {es} as well however its semantic clarified a bit. + +While RDBMS tend to have only one running instance, on a single machine (_not_ distributed), {es} goes the opposite way and by default, is distributed and multi-instance. + +Further more, an {es} `cluster` can be connected to other ++cluster++s in a _federated_ fashion thus `cluster` means: + +single cluster:: +Multiple {es} instances typically distributed across machines, running within the same namespace. +multiple clusters:: +Multiple clusters, each with its own namespace, connected to each other in a federated setup (see <>). + +|=== + +As one can see while the mapping between the concepts are not exactly one to one and the semantics somewhat different, there are more things in common than differences. In fact, thanks to SQL declarative nature, many concepts can move across {es} transparently and the terminology of the two likely to be used interchangebly through-out the rest of the material. \ No newline at end of file diff --git a/x-pack/docs/en/sql/endpoints/cli.asciidoc b/x-pack/docs/en/sql/endpoints/cli.asciidoc index edbb1dcace4f1..e04fd96ab7198 100644 --- a/x-pack/docs/en/sql/endpoints/cli.asciidoc +++ b/x-pack/docs/en/sql/endpoints/cli.asciidoc @@ -1,4 +1,3 @@ -[role="xpack"] [[sql-cli]] == SQL CLI @@ -37,18 +36,3 @@ James S.A. Corey |Leviathan Wakes |561 |1306972800000 -------------------------------------------------- // TODO it'd be lovely to be able to assert that this is correct but // that is probably more work then it is worth right now. - -[[sql-cli-permissions]] -[NOTE] -=============================== -If you are using Security you need to add a few permissions to -users so they can run SQL. To run SQL using the CLI a user needs -`read`, `indices:admin/get`, and `cluster:monitor/main`. The -following example configures a role that can run SQL in the CLI -for the `test` and `bort` indices: - -["source","yaml",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-tests}/security/roles.yml[cli_jdbc] --------------------------------------------------- -=============================== diff --git a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc index 6959035bf09e4..84182f8b4a521 100644 --- a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc +++ b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc @@ -1,4 +1,3 @@ -[role="xpack"] [[sql-jdbc]] == SQL JDBC @@ -36,11 +35,11 @@ from `artifacts.elastic.co/maven` by adding it to the repositories list: [float] === Setup -The driver main class is `org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver`. Note the driver -also implements the JDBC 4.0 +Service Provider+ mechanism meaning it is registerd automatically +The driver main class is `org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver`. +Note the driver implements the JDBC 4.0 +Service Provider+ mechanism meaning it is registerd automatically as long as its available in the classpath. 
-Once registered, the driver expects the following syntax as an URL: +Once registered, the driver understands the following syntax as an URL: ["source","text",subs="attributes"] ---- @@ -120,12 +119,12 @@ Query timeout (in seconds). That is the maximum amount of time waiting for a que To put all of it together, the following URL: -["source","text",subs="attributes"] +["source","text"] ---- jdbc:es://http://server:3456/timezone=UTC&page.size=250 ---- -Opens up a {es-jdbc} connection to `server` on port `3456`, setting the JDBC timezone to `UTC` and its pagesize to `250` entries. +Opens up a {es-sql} connection to `server` on port `3456`, setting the JDBC connection timezone to `UTC` and its pagesize to `250` entries. === API usage @@ -175,20 +174,4 @@ connection. For example: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{jdbc-tests}/SimpleExampleTestCase.java[simple_example] --------------------------------------------------- - -[[sql-jdbc-permissions]] -[NOTE] -=============================== -If you are using Security you need to add a few permissions to -users so they can run SQL. To run SQL a user needs `read` and -`indices:admin/get`. Some parts of the API require -`cluster:monitor/main`. The following example configures a -role that can run SQL in JDBC querying the `test` and `bort` -indices: - -["source","yaml",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-tests}/security/roles.yml[cli_jdbc] --------------------------------------------------- -=============================== +-------------------------------------------------- \ No newline at end of file diff --git a/x-pack/docs/en/sql/endpoints/rest.asciidoc b/x-pack/docs/en/sql/endpoints/rest.asciidoc index d31b03d3e7736..fa5093f8de528 100644 --- a/x-pack/docs/en/sql/endpoints/rest.asciidoc +++ b/x-pack/docs/en/sql/endpoints/rest.asciidoc @@ -186,17 +186,3 @@ or fewer results though. `time_zone` is the time zone to use for date functions and date parsing. `time_zone` defaults to `utc` and can take any values documented http://www.joda.org/joda-time/apidocs/org/joda/time/DateTimeZone.html[here]. - -[[sql-rest-permissions]] -[NOTE] -=============================== -If you are using Security you need to add a few permissions to -users so they can run SQL. To run SQL a user needs `read` and -`indices:admin/get`. The following example configures a role -that can run SQL against the `test` and `bort` indices: - -["source","yaml",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-tests}/security/roles.yml[rest] --------------------------------------------------- -=============================== diff --git a/x-pack/docs/en/sql/endpoints/translate.asciidoc b/x-pack/docs/en/sql/endpoints/translate.asciidoc index 9c1d71af5d35e..be6a77a3caa44 100644 --- a/x-pack/docs/en/sql/endpoints/translate.asciidoc +++ b/x-pack/docs/en/sql/endpoints/translate.asciidoc @@ -1,4 +1,3 @@ -[role="xpack"] [[sql-translate]] == SQL Translate API @@ -57,18 +56,3 @@ the normal <> API. The request body accepts all of the <> that the <> accepts except `cursor`. - -[[sql-translate-permissions]] -[NOTE] -=============================== -If you are using Security you need to add a few permissions to -users so they can run translate SQL. To translate SQL a user -needs `read` and `indices:admin/get`. 
The following example -configures a role that can run SQL against the `test` and -`bort` indices: - -["source","yaml",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-tests}/security/roles.yml[rest] --------------------------------------------------- -=============================== diff --git a/x-pack/docs/en/sql/functions/index.asciidoc b/x-pack/docs/en/sql/functions/index.asciidoc index a4e7028cf39c3..dd68370dde3e7 100644 --- a/x-pack/docs/en/sql/functions/index.asciidoc +++ b/x-pack/docs/en/sql/functions/index.asciidoc @@ -348,6 +348,25 @@ include-tagged::{sql-specs}/datetime.csv-spec[minuteOfHour] include-tagged::{sql-specs}/datetime.csv-spec[secondOfMinute] -------------------------------------------------- +* Extract + +As an alternative, one can support `EXTRACT` to extract fields from datetimes. +You can run any <> +with `EXTRACT( FROM )`. So + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[extractDayOfYear] +-------------------------------------------------- + +is the equivalent to + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[dayOfYear] +-------------------------------------------------- + + [[sql-functions-aggregate]] === Aggregate Functions diff --git a/x-pack/docs/en/sql/index.asciidoc b/x-pack/docs/en/sql/index.asciidoc index 902ea8ada7e22..4c2130208927a 100644 --- a/x-pack/docs/en/sql/index.asciidoc +++ b/x-pack/docs/en/sql/index.asciidoc @@ -19,7 +19,11 @@ indices and return results in tabular format. <>:: Overview of {es-sql} and its features. <>:: - Start using SQL right away in {es} + Start using SQL right away in {es}. +<>:: + Language conventions across SQL and {es}. +<>:: + Securing {es-sql} and {es}. <>:: Accepts SQL in a JSON document, executes it, and returns the results. @@ -34,15 +38,17 @@ indices and return results in tabular format. <>:: List of functions and operators supported. <>:: - Overview of the {es-sql} language, such as data types, syntax and - reserved keywords. - + Overview of the {es-sql} language, such as supported data types, commands and + syntax. -- include::overview.asciidoc[] include::getting-started.asciidoc[] +include::concepts.asciidoc[] +include::security.asciidoc[] include::endpoints/index.asciidoc[] include::functions/index.asciidoc[] include::language/index.asciidoc[] +include::appendix/index.asciidoc[] :jdbc-tests!: diff --git a/x-pack/docs/en/sql/language/data-types.asciidoc b/x-pack/docs/en/sql/language/data-types.asciidoc index a01c2fda5c726..7e5f045aa6ce9 100644 --- a/x-pack/docs/en/sql/language/data-types.asciidoc +++ b/x-pack/docs/en/sql/language/data-types.asciidoc @@ -1,5 +1,5 @@ [[sql-data-types]] -=== Data Types +== Data Types Most of {es} <> are available in {es-sql}, as indicated below: @@ -42,7 +42,7 @@ uses the data type _particularities_ of the former over the latter as ultimately [[sql-multi-field]] [float] -==== SQL and multi-fields +=== SQL and multi-fields A core concept in {es} is that of an `analyzed` field, that is a full-text value that is interpreted in order to be effectively indexed. 
These fields are of type <> and are not used for sorting or aggregations as their actual value depends on the <> used hence why {es} also offers the <> type for storing the _exact_ diff --git a/x-pack/docs/en/sql/language/index.asciidoc b/x-pack/docs/en/sql/language/index.asciidoc index 24bf450f1e42e..fdf6f3e7950ca 100644 --- a/x-pack/docs/en/sql/language/index.asciidoc +++ b/x-pack/docs/en/sql/language/index.asciidoc @@ -1,9 +1,10 @@ [[sql-spec]] == SQL Language -This chapter describes the SQL syntax and data types supported in X-Pack. -As a general rule, the syntax tries to adhere as much as possible to ANSI SQL to make the transition seamless. +This chapter describes the SQL semantics supported in X-Pack namely: + +<>:: Data types +<>:: Commands include::data-types.asciidoc[] -include::syntax.asciidoc[] -include::reserved.asciidoc[] +include::syntax/index.asciidoc[] diff --git a/x-pack/docs/en/sql/language/syntax.asciidoc b/x-pack/docs/en/sql/language/syntax.asciidoc deleted file mode 100644 index 5b837c91db2b1..0000000000000 --- a/x-pack/docs/en/sql/language/syntax.asciidoc +++ /dev/null @@ -1,123 +0,0 @@ -[[sql-spec-syntax]] -=== SQL Statement Syntax - -// Big list of the entire syntax in SQL - -// Each entry might get its own file and code snippet - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/select.sql-spec[wildcardWithOrder] --------------------------------------------------- - - -[[sql-spec-syntax-order-by]] -==== `ORDER BY` - -Elasticsearch supports `ORDER BY` for consistent ordering. You add -any field in the index that has <> or -`SCORE()` to sort by `_score`. By default SQL sorts on what it -considers to be the most efficient way to get the results. - -So sorting by a field looks like: - -[source,js] --------------------------------------------------- -POST /_xpack/sql?format=txt -{ - "query": "SELECT * FROM library ORDER BY page_count DESC LIMIT 5" -} --------------------------------------------------- -// CONSOLE -// TEST[setup:library] - -which results in something like: - -[source,text] --------------------------------------------------- - author | name | page_count | release_date ------------------+--------------------+---------------+------------------------ -Peter F. Hamilton|Pandora's Star |768 |2004-03-02T00:00:00.000Z -Vernor Vinge |A Fire Upon the Deep|613 |1992-06-01T00:00:00.000Z -Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -Alastair Reynolds|Revelation Space |585 |2000-03-15T00:00:00.000Z -James S.A. Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z --------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/] -// TESTRESPONSE[_cat] - -[[sql-spec-syntax-order-by-score]] -For sorting by score to be meaningful you need to include a full -text query in the `WHERE` clause. If you include multiple full -text queries in the `WHERE` clause then their scores will be -combined using the same rules as Elasticsearch's -<>. 
Here is a simple example: - -[source,js] --------------------------------------------------- -POST /_xpack/sql?format=txt -{ - "query": "SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY SCORE() DESC" -} --------------------------------------------------- -// CONSOLE -// TEST[setup:library] - -Which results in something like: - -[source,text] --------------------------------------------------- - SCORE() | author | name | page_count | release_date ----------------+---------------+-------------------+---------------+------------------------ -2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z -1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z -1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z --------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] -// TESTRESPONSE[_cat] - -Note that you can return `SCORE()` by adding it to the where clause. This -is possible even if you are not sorting by `SCORE()`: - -[source,js] --------------------------------------------------- -POST /_xpack/sql?format=txt -{ - "query": "SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY page_count DESC" -} --------------------------------------------------- -// CONSOLE -// TEST[setup:library] - -[source,text] --------------------------------------------------- - SCORE() | author | name | page_count | release_date ----------------+---------------+-------------------+---------------+------------------------ -2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z -1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z -1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z --------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] -// TESTRESPONSE[_cat] - - -[[sql-spec-syntax-extract]] -==== `EXTRACT` - -Elasticsearch supports `EXTRACT` to extract fields from datetimes. -You can run any <> -with `EXTRACT( FROM )`. So - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[extractDayOfYear] --------------------------------------------------- - -is the equivalent to - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[dayOfYear] --------------------------------------------------- diff --git a/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc b/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc new file mode 100644 index 0000000000000..114def470b181 --- /dev/null +++ b/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc @@ -0,0 +1,20 @@ +[[sql-syntax-describe-table]] +=== DESCRIBE TABLE + +.Synopsis +[source, sql] +---- +DESCRIBE table +---- + +or + +[source, sql] +---- +DESC table +---- + + +.Description + +`DESC` and `DESCRIBE` are aliases to <>. 
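As a quick, hedged usage sketch of the alias relationship just described, the three statements below should be interchangeable; they assume the `library` index that the other examples in this patch query, and each statement is meant to be run on its own.

[source, sql]
----
-- each of these lists the columns of the library index and their data types
DESCRIBE library
DESC library
SHOW COLUMNS FROM library
----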
diff --git a/x-pack/docs/en/sql/language/syntax/index.asciidoc b/x-pack/docs/en/sql/language/syntax/index.asciidoc new file mode 100644 index 0000000000000..e0e970edae14b --- /dev/null +++ b/x-pack/docs/en/sql/language/syntax/index.asciidoc @@ -0,0 +1,16 @@ +[[sql-commands]] +== SQL Commands + +This section contains the list of SQL commands supported by {es-sql} along with their syntax: + +<>:: Describe a table. +<>:: Retrieve rows from zero or more tables. +<>:: List columns in table. +<>:: List supported functions. +<>:: List tables available. + +include::describe-table.asciidoc[] +include::select.asciidoc[] +include::show-columns.asciidoc[] +include::show-functions.asciidoc[] +include::show-tables.asciidoc[] diff --git a/x-pack/docs/en/sql/language/syntax/select.asciidoc b/x-pack/docs/en/sql/language/syntax/select.asciidoc new file mode 100644 index 0000000000000..f39cbc0c2f8ca --- /dev/null +++ b/x-pack/docs/en/sql/language/syntax/select.asciidoc @@ -0,0 +1,284 @@ +[[sql-syntax-select]] +=== SELECT + +.Synopsis +[source, sql] +---- +SELECT select_expr [, ...] +[ FROM table_name ] +[ WHERE condition ] +[ GROUP BY grouping_element [, ...] ] +[ HAVING condition] +[ ORDER BY expression [ ASC | DESC ] [, ...] ] +[ LIMIT [ count ] ] +---- + +.Description + +Retrieves rows from zero or more tables. + +The general execution of `SELECT` is as follows: + +. All elements in the `FROM` list are computed (each element can be base or alias table). Currently `FROM` supports exactly one table. Do note however that the table name can be a pattern (see <> below). +. If the `WHERE` clause is specified, all rows that do not satisfy the condition are eliminated from the output. (See <> below.) +. If the `GROUP BY` clause is specified, or if there are aggregate function calls, the output is combined into groups of rows that match on one or more values, and the results of aggregate functions are computed. If the `HAVING` clause is present, it eliminates groups that do not satisfy the given condition. (See <> and <> below.) +. The actual output rows are computed using the `SELECT` output expressions for each selected row or row group. +. If the `ORDER BY` clause is specified, the returned rows are sorted in the specified order. If `ORDER BY` is not given, the rows are returned in whatever order the system finds fastest to produce. (See <> below.) +. If the `LIMIT` is specified, the `SELECT` statement only returns a subset of the result rows. (See <> below.) + + +[[sql-syntax-select-list]] +==== `SELECT` List + +`SELECT` list, namely the expressions between `SELECT` and `FROM`, represent the output rows of the `SELECT` statement. + +As with a table, every output column of a `SELECT` has a name which can be either specified per column through the `AS` keyword : + +[source,sql] +---- +SELECT column AS c +---- + +assigned by {es-sql} if no name is given: + +[source,sql] +---- +SELECT 1 + 1 +---- + +or if it's a simple column reference, use its name as the column name: + +[source,sql] +---- +SELECT col FROM table +---- + +[[sql-syntax-select-wildcard]] +==== Wildcard + +To select all the columns in the source, one can use `*`: + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/select.sql-spec[wildcardWithOrder] +-------------------------------------------------- + +which essentially returns all columsn found. 
+ +[[sql-syntax-from]] +[float] +==== FROM Clause + +The `FROM` clause specifies one table for the `SELECT` and has the following syntax: + +[source, sql] +---- +FROM table_name [ [ AS ] alias ] +---- + +where: + +`table_name`:: + +Represents the name (optionally qualified) of an existing table, either a concrete or base one (actual index) or alias. +If the table name contains special SQL characters (such as `.`,`-`,etc...) use double quotes to escape them: +[source, sql] +---- +SELECT ... FROM "some-table" +---- + +The name can be a <> pointing to multiple indices (likely requiring quoting as mentioned above) with the restriction that *all* resolved concrete tables have **exact mapping**. + +`alias`:: +A substitute name for the `FROM` item containing the alias. An alias is used for brevity or to eliminate ambiguity. When an alias is provided, it completely hides the actual name of the table and must be used in its place. + +[[sql-syntax-where]] +[float] +==== WHERE Clause + +The optional `WHERE` clause is used to filter rows from the query and has the following syntax: + +[source, sql] +---- +WHERE condition +---- + +where: + +`condition`:: + +Represents an expression that evaluates to a `boolean`. Only the rows that match the condition (to `true`) are returned. + +[[sql-syntax-group-by]] +[float] +==== GROUP BY + +The `GROUP BY` clause is used to divide the results into groups of rows on matching values from the designated columns. It has the following syntax: + +[source, sql] +---- +GROUP BY grouping_element [, ...] +---- + +where: + +`grouping_element`:: + +Represents an expression on which rows are being grouped _on_. It can be a column name, name or ordinal number of a column or an arbitrary expression of column values. + +When a `GROUP BY` clause is used in a `SELECT`, _all_ output expressions must be either aggregate functions or expressions used for grouping or derivates of (otherwise there would be more than one possible value to return for each ungrouped column). + +[[sql-syntax-having]] +[float] +==== HAVING + +The `HAVING` clause can be used _only_ along aggregate functions (and thus `GROUP BY`) to filter what groups are kept or not and has the following syntax: + +[source, sql] +---- +GROUP BY condition +---- + +where: + +`condition`:: + +Represents an expression that evaluates to a `boolean`. Only groups that match the condition (to `true`) are returned. + +Both `WHERE` and `HAVING` are used for filtering however there are several differences between them: + +. `WHERE` works on individual *rows*, `HAVING` works on the *groups* created by ``GROUP BY`` +. `WHERE` is evaluated *before* grouping, `HAVING` is evaluated *after* grouping + +Note that it is possible to have a `HAVING` clause without a ``GROUP BY``. In this case, an __implicit grouping__ is applied, meaning all selected rows are considered to form a single group and `HAVING` can be applied on any of the aggregate functions specified on this group. ` +As such a query emits only a single row (as there is only a single group), `HAVING` condition returns either one row (the group) or zero if the condition fails. + +[[sql-syntax-order-by]] +[float] +==== ORDER BY + +The `ORDER BY` clause is used to sort the results of `SELECT` by one or more expressions: + +[source, sql] +---- +ORDER BY expression [ ASC | DESC ] [, ...] +---- + +where: + +`expression`:: + +Represents an input column, an output column or an ordinal number of the position (starting from one) of an output column. 
Additionally, ordering can be done based on the results _score_. +
+The direction, if not specified, is by default `ASC` (ascending). +
+Regardless of the ordering specified, null values are ordered last (at the end).
+
+IMPORTANT: When used alongside `GROUP BY`, the `ORDER BY` expression can point _only_ to the columns used for grouping.
+
+For example, the following query sorts by an arbitrary input field (`page_count`):
+
+[source,js]
+--------------------------------------------------
+POST /_xpack/sql?format=txt
+{
+    "query": "SELECT * FROM library ORDER BY page_count DESC LIMIT 5"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:library]
+
+which results in something like:
+
+[source,text]
+--------------------------------------------------
+     author      |        name        |  page_count   | release_date
+-----------------+--------------------+---------------+------------------------
+Peter F. Hamilton|Pandora's Star      |768            |2004-03-02T00:00:00.000Z
+Vernor Vinge     |A Fire Upon the Deep|613            |1992-06-01T00:00:00.000Z
+Frank Herbert    |Dune                |604            |1965-06-01T00:00:00.000Z
+Alastair Reynolds|Revelation Space    |585            |2000-03-15T00:00:00.000Z
+James S.A. Corey |Leviathan Wakes     |561            |2011-06-02T00:00:00.000Z
+--------------------------------------------------
+// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/]
+// TESTRESPONSE[_cat]
+
+[[sql-syntax-order-by-score]]
+==== Order By Score
+
+When doing full-text queries in the `WHERE` clause, results can be returned based on their
+{defguide}/relevance-intro.html[score] or _relevance_ to the given query.
+
+NOTE: When doing multiple text queries in the `WHERE` clause, their scores will be
+combined using the same rules as {es}'s
+<<query-dsl-bool-query,bool query>>.
+
+To sort based on the `score`, use the special function `SCORE()`:
+
+[source,js]
+--------------------------------------------------
+POST /_xpack/sql?format=txt
+{
+    "query": "SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY SCORE() DESC"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:library]
+
+Which results in something like:
+
+[source,text]
+--------------------------------------------------
+    SCORE()    |    author     |       name        |  page_count   |      release_date
+---------------+---------------+-------------------+---------------+------------------------
+2.288635       |Frank Herbert  |Dune               |604            |1965-06-01T00:00:00.000Z
+1.8893257      |Frank Herbert  |Dune Messiah       |331            |1969-10-15T00:00:00.000Z
+1.6086555      |Frank Herbert  |Children of Dune   |408            |1976-04-21T00:00:00.000Z
+1.4005898      |Frank Herbert  |God Emperor of Dune|454            |1981-05-28T00:00:00.000Z
+--------------------------------------------------
+// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/]
+// TESTRESPONSE[_cat]
+
+Note that you can return `SCORE()` in the `SELECT` list whenever the `WHERE` clause contains a full-text predicate.
+This is possible even if you are not sorting by `SCORE()`:
+
+[source,js]
+--------------------------------------------------
+POST /_xpack/sql?format=txt
+{
+    "query": "SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY page_count DESC"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:library]
+
+[source,text]
+--------------------------------------------------
+    SCORE()    |    author     |       name        |  page_count   |      release_date
+---------------+---------------+-------------------+---------------+------------------------
+2.288635       |Frank Herbert  |Dune               |604            |1965-06-01T00:00:00.000Z
+1.4005898      |Frank Herbert  |God Emperor of Dune|454            |1981-05-28T00:00:00.000Z
+1.6086555      |Frank Herbert  |Children of Dune   |408            |1976-04-21T00:00:00.000Z
+1.8893257      |Frank Herbert  |Dune Messiah       |331            |1969-10-15T00:00:00.000Z
+--------------------------------------------------
+// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/]
+// TESTRESPONSE[_cat]
+
+NOTE: Trying to return `score` from a non full-text query will return the same value for all results, as
+all are equally relevant.
+
+[[sql-syntax-limit]]
+[float]
+==== LIMIT
+
+The `LIMIT` clause restricts (limits) the number of rows returned, using the format:
+
+[source, sql]
+----
+LIMIT ( count | ALL )
+----
+
+where
+
+count:: is a positive integer or zero indicating the maximum *possible* number of results being returned (as there might be fewer matches than the limit). If `0` is specified, no results are returned.
+
+ALL:: indicates there is no limit and thus all results are being returned.
diff --git a/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc b/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc
new file mode 100644
index 0000000000000..2e7c8f7bfca69
--- /dev/null
+++ b/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc
@@ -0,0 +1,12 @@
+[[sql-syntax-show-columns]]
+=== SHOW COLUMNS
+
+.Synopsis
+[source, sql]
+----
+SHOW COLUMNS [ FROM | IN ] ? table
+----
+
+.Description
+
+List the columns in a table and their data type (and other attributes).
diff --git a/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc b/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc
new file mode 100644
index 0000000000000..197b9e8cb3b79
--- /dev/null
+++ b/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc
@@ -0,0 +1,14 @@
+[[sql-syntax-show-functions]]
+=== SHOW FUNCTIONS
+
+.Synopsis
+[source, sql]
+----
+SHOW FUNCTIONS [ LIKE? pattern<1>? ]?
+----
+
+<1> SQL match pattern
+
+.Description
+
+List all the SQL functions and their type. The `LIKE` clause can be used to restrict the list of names to the given pattern.
diff --git a/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc b/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc
new file mode 100644
index 0000000000000..9266b6d58058b
--- /dev/null
+++ b/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc
@@ -0,0 +1,14 @@
+[[sql-syntax-show-tables]]
+=== SHOW TABLES
+
+.Synopsis
+[source, sql]
+----
+SHOW TABLES [ LIKE? pattern<1>? ]?
+----
+
+<1> SQL match pattern
+
+.Description
+
+List the tables available to the current user and their type. The `LIKE` clause can be used to restrict the list of names to the given pattern.
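+
+For example (an illustrative, untested sketch; the index name is an assumption), the following lists every
+table whose name starts with `lib`, using the usual SQL pattern wildcards (`%` for any number of characters,
+`_` for exactly one):
+
+[source,sql]
+----
+-- matches, for instance, an index named library
+SHOW TABLES LIKE 'lib%'
+----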
diff --git a/x-pack/docs/en/sql/security.asciidoc b/x-pack/docs/en/sql/security.asciidoc
new file mode 100644
index 0000000000000..bba73a2a4de6d
--- /dev/null
+++ b/x-pack/docs/en/sql/security.asciidoc
@@ -0,0 +1,37 @@
+[[sql-security]]
+== Security
+
+{es-sql} integrates with security, if this is enabled on your cluster.
+In such a scenario, {es-sql} supports both security at the transport layer (by encrypting the communication between the consumer and the server) and authentication (for the access layer).
+
+[float]
+==== SSL/TLS configuration
+
+In case of an encrypted transport, the SSL/TLS support needs to be enabled in {es-sql} to properly establish communication with {es}. This is done by setting the `ssl` property to `true` or by using the `https` prefix in the URL.
+
+Depending on your SSL configuration (whether the certificates are signed by a CA or not, whether they are global at the JVM level or local to one application), you might need to set up the `keystore` and/or `truststore`, that is, where the _credentials_ are stored (the `keystore`, which typically stores private keys and certificates) and how to _verify_ them (the `truststore`, which typically stores certificates from third parties, also known as CAs or certificate authorities).
+
+Typically (and again, do note that your environment might differ significantly), if the SSL setup for {es-sql} is not already done at the JVM level, one needs to set up the `keystore` if the {es-sql} security requires client authentication (PKI - Public Key Infrastructure), and set up the `truststore` if SSL is enabled.
+
+[float]
+==== Authentication
+
+The authentication support in {es-sql} is of two types:
+
+Username/Password:: Set these through the `user` and `password` properties.
+PKI/X.509:: Use X.509 certificates to authenticate {es-sql} to {es}. For this, one would need to set up the `keystore` containing the private key and certificate of the appropriate user (configured in {es}) and the `truststore` with the CA certificate used to sign the SSL/TLS certificates in the {es} cluster. That is, one should set up the key to authenticate {es-sql} and also verify that it is the right one. To do so, one should set the `ssl.keystore.location` and `ssl.truststore.location` properties to indicate the `keystore` and `truststore` to use. It is recommended to have these secured with a password, in which case the `ssl.keystore.pass` and `ssl.truststore.pass` properties are required.
+
+[float]
+[[sql-security-permissions]]
+==== Permissions (server-side)
+Lastly, on the server one needs to add a few permissions to
+users so they can run SQL. To run SQL a user needs `read` and
+`indices:admin/get` permissions at a minimum, while some parts of
+the API require `cluster:monitor/main`.
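+
+As a rough sketch (the role name is made up and only the minimal privileges listed above are granted;
+the tested snippet from the documentation sources follows below), such a role could look like this in
+`roles.yml`:
+
+[source,yaml]
+----
+# illustrative only: minimal privileges for running SQL against the test and bort indices
+sql_minimal:
+  cluster:
+    - "cluster:monitor/main"
+  indices:
+    - names: [ "test", "bort" ]
+      privileges: [ "read", "indices:admin/get" ]
+----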
+ +The following example configures a role that can run SQL in JDBC querying the `test` and `bort` +indices: + +["source","yaml",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-tests}/security/roles.yml[cli_jdbc] +-------------------------------------------------- + From bd06563e78688fa3a94a8b17285a0a6c7565fbb9 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 21 Jun 2018 10:08:50 -0700 Subject: [PATCH 60/92] [DOCS] Creates field and document level security overview (#30937) --- ...field-and-document-access-control.asciidoc | 404 +----------------- .../authorization/role-templates.asciidoc | 71 +++ .../authorization/set-security-user.asciidoc | 61 +++ 3 files changed, 140 insertions(+), 396 deletions(-) create mode 100644 x-pack/docs/en/security/authorization/role-templates.asciidoc create mode 100644 x-pack/docs/en/security/authorization/set-security-user.asciidoc diff --git a/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc b/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc index a1aa44895c6a6..119a090232c2f 100644 --- a/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc +++ b/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc @@ -3,9 +3,11 @@ === Setting up field and document level security You can control access to data within an index by adding field and document level -security permissions to a role. Field level security permissions restrict access -to particular fields within a document. Document level security permissions -restrict access to particular documents within an index. +security permissions to a role. +<> restrict access to +particular fields within a document. +<> restrict access +to particular documents within an index. NOTE: Document and field level security is currently meant to operate with read-only privileged accounts. Users with document and field level @@ -23,399 +25,6 @@ grant wider access than intended. Each user has a single set of field level and document level permissions per index. See <>. ===================================================================== -[[field-level-security]] -==== Field level security - -To enable field level security, specify the fields that each role can access -as part of the indices permissions in a role definition. Field level security is -thus bound to a well-defined set of indices (and potentially a set of -<>). - -The following role definition grants read access only to the `category`, -`@timestamp`, and `message` fields in all the `events-*` indices. - -[source,js] --------------------------------------------------- -{ - "indices": [ - { - "names": [ "events-*" ], - "privileges": [ "read" ], - "field_security" : { - "grant" : [ "category", "@timestamp", "message" ] - } - } - ] -} --------------------------------------------------- - -Access to the following meta fields is always allowed: `_id`, -`_type`, `_parent`, `_routing`, `_timestamp`, `_ttl`, `_size` and `_index`. If -you specify an empty list of fields, only these meta fields are accessible. - -NOTE: Omitting the fields entry entirely disables field-level security. - -You can also specify field expressions. 
For example, the following -example grants read access to all fields that start with an `event_` prefix: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant" : [ "event_*" ] - } - } - ] -} --------------------------------------------------- - -Use the dot notations to refer to nested fields in more complex documents. For -example, assuming the following document: - -[source,js] --------------------------------------------------- -{ - "customer": { - "handle": "Jim", - "email": "jim@mycompany.com", - "phone": "555-555-5555" - } -} --------------------------------------------------- - -The following role definition enables only read access to the customer `handle` -field: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant" : [ "customer.handle" ] - } - } - ] -} --------------------------------------------------- - -This is where wildcard support shines. For example, use `customer.*` to enable -only read access to the `customer` data: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant" : [ "customer.*" ] - } - } - ] -} --------------------------------------------------- - -You can deny permission to access fields with the following syntax: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant" : [ "*"], - "except": [ "customer.handle" ] - } - } - ] -} --------------------------------------------------- - - -The following rules apply: - -* The absence of `field_security` in a role is equivalent to * access. -* If permission has been granted explicitly to some fields, you can specify -denied fields. The denied fields must be a subset of the fields to which -permissions were granted. -* Defining denied and granted fields implies access to all granted fields except -those which match the pattern in the denied fields. - -For example: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "except": [ "customer.handle" ], - "grant" : [ "customer.*" ] - } - } - ] -} --------------------------------------------------- - -In the above example, users can read all fields with the prefix "customer." -except for "customer.handle". - -An empty array for `grant` (for example, `"grant" : []`) means that access has -not been granted to any fields. - -===== Field Level Security and Roles - -When a user has several roles that specify field level permissions, the -resulting field level permissions per index are the union of the individual role -permissions. For example, if these two roles are merged: - -[source,js] --------------------------------------------------- -{ - // role 1 - ... - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant": [ "a.*" ], - "except" : [ "a.b*" ] - } - } - ] -} - -{ - // role 2 - ... 
- "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant": [ "a.b*" ], - "except" : [ "a.b.c*" ] - } - } - ] -} --------------------------------------------------- - -The resulting permission is equal to: - -[source,js] --------------------------------------------------- -{ - // role 1 + role 2 - ... - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant": [ "a.*" ], - "except" : [ "a.b.c*" ] - } - } - ] -} --------------------------------------------------- - - -[[document-level-security]] -==== Document level security - -Document level security restricts the documents that users have read access to. -To enable document level security, specify a query that matches all the -accessible documents as part of the indices permissions within a role definition. -Document level security is thus bound to a well defined set of indices. - -Enabling document level security restricts which documents can be accessed from -any document-based read API. To enable document level security, you use a query -to specify the documents that each role can access in the `roles.yml` file. -You specify the document query with the `query` option. The document query is -associated with a particular index or index pattern and operates in conjunction -with the privileges specified for the indices. - -The following role definition grants read access only to documents that -belong to the `click` category within all the `events-*` indices: - -[source,js] --------------------------------------------------- -{ - "indices": [ - { - "names": [ "events-*" ], - "privileges": [ "read" ], - "query": "{\"match\": {\"category\": \"click\"}}" - } - ] -} --------------------------------------------------- - -NOTE: Omitting the `query` entry entirely disables document level security for - the respective indices permission entry. - -The specified `query` expects the same format as if it was defined in the -search request and supports the full {es} {ref}/query-dsl.html[Query DSL]. - -For example, the following role grants read access only to the documents whose -`department_id` equals `12`: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "query" : { - "term" : { "department_id" : 12 } - } - } - ] -} --------------------------------------------------- - -NOTE: `query` also accepts queries written as string values. - -[[templating-role-query]] -===== Templating a role query - -You can use Mustache templates in a role query to insert the username of the -current authenticated user into the role. Like other places in {es} that support -templating or scripting, you can specify inline, stored, or file-based templates -and define custom parameters. You access the details for the current -authenticated user through the `_user` parameter. - -For example, the following role query uses a template to insert the username -of the current authenticated user: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "my_index" ], - "privileges" : [ "read" ], - "query" : { - "template" : { - "source" : { - "term" : { "acl.username" : "{{_user.username}}" } - } - } - } - } - ] -} --------------------------------------------------- - -You can access the following information through the `_user` variable: - -[options="header"] -|====== -| Property | Description -| `_user.username` | The username of the current authenticated user. 
-| `_user.full_name` | If specified, the full name of the current authenticated user. -| `_user.email` | If specified, the email of the current authenticated user. -| `_user.roles` | If associated, a list of the role names of the current authenticated user. -| `_user.metadata` | If specified, a hash holding custom metadata of the current authenticated user. -|====== - -You can also access custom user metadata. For example, if you maintain a -`group_id` in your user metadata, you can apply document level security -based on the `group.id` field in your documents: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "my_index" ], - "privileges" : [ "read" ], - "query" : { - "template" : { - "source" : { - "term" : { "group.id" : "{{_user.metadata.group_id}}" } - } - } - } - } - ] -} --------------------------------------------------- - -[[set-security-user-processor]] -===== Set security user ingest processor - -If an index is shared by many small users it makes sense to put all these users -into the same index. Having a dedicated index or shard per user is wasteful. -To guarantee that a user reads only their own documents, it makes sense to set up -document level security. In this scenario, each document must have the username -or role name associated with it, so that this information can be used by the -role query for document level security. This is a situation where the -`set_security_user` ingest processor can help. - -NOTE: Document level security doesn't apply to write APIs. You must use unique -ids for each user that uses the same index, otherwise they might overwrite other -users' documents. The ingest processor just adds properties for the current -authenticated user to the documents that are being indexed. - -The `set_security_user` processor attaches user-related details (such as -`username`, `roles`, `email`, `full_name` and `metadata` ) from the current -authenticated user to the current document by pre-processing the ingest. When -you index data with an ingest pipeline, user details are automatically attached -to the document. For example: - -[source,js] --------------------------------------------------- -PUT shared-logs/log/1?pipeline=my_pipeline_id -{ - ... -} --------------------------------------------------- - -Read the {ref}/ingest.html[ingest docs] for more information -about setting up a pipeline and other processors. - -[[set-security-user-options]] -.Set Security User Options -[options="header"] -|====== -| Name | Required | Default | Description -| `field` | yes | - | The field to store the user information into. -| `properties` | no | [`username`, `roles`, `email`, `full_name`, `metadata`] | Controls what user related properties are added to the `field`. -|====== - -The following example adds all user details for the current authenticated user -to the `user` field for all documents that are processed by this pipeline: - -[source,js] --------------------------------------------------- -{ - "processors" : [ - { - "set_security_user": { - "field": "user" - } - } - ] -} --------------------------------------------------- - [[multiple-roles-dls-fls]] ==== Multiple roles with document and field level security @@ -447,3 +56,6 @@ fields. If you need to restrict access to both documents and fields, consider splitting documents by index instead. 
+ +include::role-templates.asciidoc[] +include::set-security-user.asciidoc[] diff --git a/x-pack/docs/en/security/authorization/role-templates.asciidoc b/x-pack/docs/en/security/authorization/role-templates.asciidoc new file mode 100644 index 0000000000000..1bad73a5d1e94 --- /dev/null +++ b/x-pack/docs/en/security/authorization/role-templates.asciidoc @@ -0,0 +1,71 @@ +[[templating-role-query]] +==== Templating a role query + +When you create a role, you can specify a query that defines the +<>. You can +optionally use Mustache templates in the role query to insert the username of the +current authenticated user into the role. Like other places in {es} that support +templating or scripting, you can specify inline, stored, or file-based templates +and define custom parameters. You access the details for the current +authenticated user through the `_user` parameter. + +For example, the following role query uses a template to insert the username +of the current authenticated user: + +[source,js] +-------------------------------------------------- +POST /_xpack/security/role/example1 +{ + "indices" : [ + { + "names" : [ "my_index" ], + "privileges" : [ "read" ], + "query" : { + "template" : { + "source" : { + "term" : { "acl.username" : "{{_user.username}}" } + } + } + } + } + ] +} +-------------------------------------------------- +// CONSOLE + +You can access the following information through the `_user` variable: + +[options="header"] +|====== +| Property | Description +| `_user.username` | The username of the current authenticated user. +| `_user.full_name` | If specified, the full name of the current authenticated user. +| `_user.email` | If specified, the email of the current authenticated user. +| `_user.roles` | If associated, a list of the role names of the current authenticated user. +| `_user.metadata` | If specified, a hash holding custom metadata of the current authenticated user. +|====== + +You can also access custom user metadata. For example, if you maintain a +`group_id` in your user metadata, you can apply document level security +based on the `group.id` field in your documents: + +[source,js] +-------------------------------------------------- +POST /_xpack/security/role/example2 +{ + "indices" : [ + { + "names" : [ "my_index" ], + "privileges" : [ "read" ], + "query" : { + "template" : { + "source" : { + "term" : { "group.id" : "{{_user.metadata.group_id}}" } + } + } + } + } + ] +} +-------------------------------------------------- +// CONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/security/authorization/set-security-user.asciidoc b/x-pack/docs/en/security/authorization/set-security-user.asciidoc new file mode 100644 index 0000000000000..92b9ae275aec8 --- /dev/null +++ b/x-pack/docs/en/security/authorization/set-security-user.asciidoc @@ -0,0 +1,61 @@ +[[set-security-user-processor]] +==== Pre-processing documents to add security details + +// If an index is shared by many small users it makes sense to put all these users +// into the same index. Having a dedicated index or shard per user is wasteful. +// TBD: It's unclear why we're putting users in an index here. + +To guarantee that a user reads only their own documents, it makes sense to set up +document level security. In this scenario, each document must have the username +or role name associated with it, so that this information can be used by the +role query for document level security. This is a situation where the +`set_security_user` ingest processor can help. 
+ +NOTE: Document level security doesn't apply to write APIs. You must use unique +ids for each user that uses the same index, otherwise they might overwrite other +users' documents. The ingest processor just adds properties for the current +authenticated user to the documents that are being indexed. + +The `set_security_user` processor attaches user-related details (such as +`username`, `roles`, `email`, `full_name` and `metadata` ) from the current +authenticated user to the current document by pre-processing the ingest. When +you index data with an ingest pipeline, user details are automatically attached +to the document. For example: + +[source,js] +-------------------------------------------------- +PUT shared-logs/log/1?pipeline=my_pipeline_id +{ + ... +} +-------------------------------------------------- +// NOTCONSOLE + +For more information about setting up a pipeline and other processors, see +{ref}/ingest.html[ingest node]. + +[[set-security-user-options]] +.Set Security User Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to store the user information into. +| `properties` | no | [`username`, `roles`, `email`, `full_name`, `metadata`] | Controls what user related properties are added to the `field`. +|====== + +The following example adds all user details for the current authenticated user +to the `user` field for all documents that are processed by this pipeline: + +[source,js] +-------------------------------------------------- +{ + "processors" : [ + { + "set_security_user": { + "field": "user" + } + } + ] +} +-------------------------------------------------- +// NOTCONSOLE \ No newline at end of file From 6f3e97f2b7536e9d092ba80cb71d4ff3f7557734 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 21 Jun 2018 13:24:39 -0400 Subject: [PATCH 61/92] Test: Skip assertion on windows Windows doesn't provide consistent exception messages when it can't connect so skip the exception message assertion on windows. Closes #31457 --- .../client/RestClientMultipleHostsIntegTests.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java index d09741ea25b6c..7f5915fe3529d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java @@ -42,9 +42,7 @@ import static org.elasticsearch.client.RestClientTestUtil.getAllStatusCodes; import static org.elasticsearch.client.RestClientTestUtil.randomErrorNoRetryStatusCode; import static org.elasticsearch.client.RestClientTestUtil.randomOkStatusCode; -import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -216,8 +214,10 @@ public void testNodeSelector() throws IOException { restClient.performRequest(request); fail("expected to fail to connect"); } catch (ConnectException e) { - // This is different in windows and linux but this matches both. - assertThat(e.getMessage(), startsWith("Connection refused")); + // Windows isn't consistent here. Sometimes the message is even null! 
+ if (false == System.getProperty("os.name").startsWith("Windows")) { + assertEquals("Connection refused", e.getMessage()); + } } } else { Response response = restClient.performRequest(request); From 048a92bf39beecef1fa33e820699d79bdcd9133b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 21 Jun 2018 13:50:46 -0400 Subject: [PATCH 62/92] Rename createNewTranslog to fileBasedRecovery (#31508) We renamed `createNewTranslog` to `fileBasedRecovery` in the RecoveryTarget but did not do this for RecoverySourceHandler. This commit makes sure that we a consistent parameter in both recovery source and target. --- .../elasticsearch/indices/recovery/RecoverySourceHandler.java | 4 ++-- .../indices/recovery/RecoverySourceHandlerTests.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 72a6fcb6ba329..45500349865f7 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -449,13 +449,13 @@ public void phase1(final IndexCommit snapshot, final Supplier translogO } } - void prepareTargetForTranslog(final boolean createNewTranslog, final int totalTranslogOps) throws IOException { + void prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTranslogOps) throws IOException { StopWatch stopWatch = new StopWatch().start(); logger.trace("recovery [phase1]: prepare remote engine for translog"); final long startEngineStart = stopWatch.totalTime().millis(); // Send a request preparing the new shard's translog to receive operations. This ensures the shard engine is started and disables // garbage collection (not the JVM's GC!) of tombstone deletes. 
- cancellableThreads.executeIO(() -> recoveryTarget.prepareForTranslogOperations(createNewTranslog, totalTranslogOps)); + cancellableThreads.executeIO(() -> recoveryTarget.prepareForTranslogOperations(fileBasedRecovery, totalTranslogOps)); stopWatch.stop(); response.startTime = stopWatch.totalTime().millis() - startEngineStart; diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 5ade55ef5340c..6be6d7e80bccb 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -423,7 +423,7 @@ public void phase1(final IndexCommit snapshot, final Supplier translogO } @Override - void prepareTargetForTranslog(final boolean createNewTranslog, final int totalTranslogOps) throws IOException { + void prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTranslogOps) throws IOException { prepareTargetForTranslogCalled.set(true); } From 60204af0cbcf43c83e86b4ddc8426111139f4838 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 21 Jun 2018 11:13:19 -0700 Subject: [PATCH 63/92] [DOCS] Remove fixed file from build.gradle --- x-pack/docs/build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 9abca910c5dfc..6c0a4bfcac647 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -11,7 +11,6 @@ apply plugin: 'elasticsearch.docs-test' buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/watcher/put-watch.asciidoc', 'en/security/authentication/user-cache.asciidoc', - 'en/security/authorization/field-and-document-access-control.asciidoc', 'en/security/authorization/run-as-privilege.asciidoc', 'en/security/ccs-clients-integrations/http.asciidoc', 'en/security/authorization/custom-roles-provider.asciidoc', From 4f9332ee16bcc422144a866122eb8bc2df2c0040 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 21 Jun 2018 11:25:26 -0700 Subject: [PATCH 64/92] Core: Remove ThreadPool from base TransportAction (#31492) Most transport actions don't need the node ThreadPool. This commit removes the ThreadPool as a super constructor parameter for TransportAction. The actions that do need the thread pool then have a member added to keep it from their own constructor. 
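To illustrate the resulting pattern, here is a minimal sketch only; `TransportExampleAction`,
`ExampleAction`, `ExampleRequest` and `ExampleResponse` are hypothetical names, and the constructor
shape simply mirrors the changes in the diffs below. An action that still needs the thread pool now
keeps its own member instead of handing it to the base class:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.support.ActionFilters;
    import org.elasticsearch.action.support.HandledTransportAction;
    import org.elasticsearch.common.inject.Inject;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.threadpool.ThreadPool;
    import org.elasticsearch.transport.TransportService;

    public class TransportExampleAction extends HandledTransportAction<ExampleRequest, ExampleResponse> {

        // kept as a member by the subclass; no longer passed to the base constructor
        private final ThreadPool threadPool;

        @Inject
        public TransportExampleAction(Settings settings, ThreadPool threadPool,
                                      TransportService transportService, ActionFilters actionFilters) {
            // the base constructor no longer takes a ThreadPool argument
            super(settings, ExampleAction.NAME, transportService, actionFilters, ExampleRequest::new);
            this.threadPool = threadPool;
        }

        @Override
        protected void doExecute(ExampleRequest request, ActionListener<ExampleResponse> listener) {
            // actions that need it can still fork work onto a pooled thread
            threadPool.generic().execute(() -> listener.onResponse(new ExampleResponse()));
        }
    }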
--- .../action/bulk/TransportNoopBulkAction.java | 8 +++---- .../search/TransportNoopSearchAction.java | 9 +++----- .../ingest/common/GrokProcessorGetAction.java | 6 ++--- .../TransportMultiSearchTemplateAction.java | 5 ++--- .../TransportSearchTemplateAction.java | 10 ++++----- .../painless/PainlessExecuteAction.java | 5 ++--- .../rankeval/TransportRankEvalAction.java | 5 ++--- .../reindex/TransportDeleteByQueryAction.java | 9 +++++--- .../index/reindex/TransportReindexAction.java | 5 +++-- .../reindex/TransportRethrottleAction.java | 4 ++-- .../reindex/TransportUpdateByQueryAction.java | 5 ++++- .../cancel/TransportCancelTasksAction.java | 4 ++-- .../tasks/get/TransportGetTaskAction.java | 4 +++- .../tasks/list/TransportListTasksAction.java | 4 ++-- .../remote/TransportRemoteInfoAction.java | 5 ++--- .../TransportClearIndicesCacheAction.java | 4 ++-- .../indices/flush/TransportFlushAction.java | 8 +++---- .../flush/TransportSyncedFlushAction.java | 5 ++--- .../forcemerge/TransportForceMergeAction.java | 4 ++-- .../get/TransportGetFieldMappingsAction.java | 5 ++--- .../recovery/TransportRecoveryAction.java | 4 ++-- .../refresh/TransportRefreshAction.java | 5 ++--- .../TransportIndicesSegmentsAction.java | 4 ++-- .../stats/TransportIndicesStatsAction.java | 4 ++-- .../get/TransportUpgradeStatusAction.java | 4 ++-- .../upgrade/post/TransportUpgradeAction.java | 4 ++-- .../query/TransportValidateQueryAction.java | 4 ++-- .../action/bulk/TransportBulkAction.java | 4 +++- .../action/bulk/TransportShardBulkAction.java | 2 ++ .../TransportFieldCapabilitiesAction.java | 4 +++- .../action/get/TransportMultiGetAction.java | 5 ++--- .../SimulatePipelineTransportAction.java | 2 +- .../action/main/TransportMainAction.java | 5 ++--- .../search/TransportClearScrollAction.java | 5 ++--- .../search/TransportMultiSearchAction.java | 7 ++++-- .../action/search/TransportSearchAction.java | 5 +++-- .../search/TransportSearchScrollAction.java | 5 ++--- .../support/HandledTransportAction.java | 22 +++++++++---------- .../action/support/TransportAction.java | 6 +---- .../broadcast/TransportBroadcastAction.java | 4 ++-- .../node/TransportBroadcastByNodeAction.java | 8 ++----- .../master/TransportMasterNodeAction.java | 9 ++++---- .../support/nodes/TransportNodesAction.java | 4 +++- .../TransportBroadcastReplicationAction.java | 5 ++--- .../TransportReplicationAction.java | 4 +++- ...ransportInstanceSingleOperationAction.java | 5 ++++- .../shard/TransportSingleShardAction.java | 4 +++- .../support/tasks/TransportTasksAction.java | 4 ++-- .../TransportMultiTermVectorsAction.java | 5 ++--- .../action/ActionModuleTests.java | 5 ++--- .../node/tasks/TaskManagerTestCase.java | 5 ++--- .../cluster/node/tasks/TestTaskPlugin.java | 9 +++----- .../node/tasks/TransportTasksActionTests.java | 8 +++---- .../action/main/MainActionTests.java | 4 +--- .../TransportActionFilterChainTests.java | 4 ++-- .../TransportBroadcastByNodeActionTests.java | 2 +- .../BroadcastReplicationTests.java | 10 ++++----- .../client/node/NodeClientHeadersTests.java | 2 +- .../persistent/TestPersistentTasksPlugin.java | 8 +++---- .../core/action/TransportXPackInfoAction.java | 11 +++++----- .../TransportGetCertificateInfoAction.java | 6 ++--- .../action/TransportXPackInfoActionTests.java | 9 ++++---- .../action/TransportGraphExploreAction.java | 8 +++---- .../ml/action/TransportCloseJobAction.java | 4 +++- .../action/TransportDeleteCalendarAction.java | 9 ++++---- .../TransportDeleteCalendarEventAction.java | 8 +++---- 
.../TransportDeleteExpiredDataAction.java | 4 +++- .../action/TransportDeleteFilterAction.java | 7 +++--- .../TransportDeleteModelSnapshotAction.java | 6 ++--- .../ml/action/TransportFlushJobAction.java | 4 ++-- .../ml/action/TransportForecastJobAction.java | 4 ++-- .../ml/action/TransportGetBucketsAction.java | 11 +++++----- .../TransportGetCalendarEventsAction.java | 7 +++--- .../action/TransportGetCalendarsAction.java | 5 ++--- .../action/TransportGetCategoriesAction.java | 9 ++++---- .../ml/action/TransportGetFiltersAction.java | 5 ++--- .../action/TransportGetInfluencersAction.java | 11 +++++----- .../action/TransportGetJobsStatsAction.java | 4 ++-- .../TransportGetModelSnapshotsAction.java | 5 ++--- .../TransportGetOverallBucketsAction.java | 4 +++- .../ml/action/TransportGetRecordsAction.java | 11 +++++----- .../TransportIsolateDatafeedAction.java | 7 +++--- .../ml/action/TransportJobTaskAction.java | 7 +++--- .../ml/action/TransportKillProcessAction.java | 7 +++--- .../ml/action/TransportMlInfoAction.java | 5 ++--- .../ml/action/TransportPersistJobAction.java | 4 ++-- .../TransportPostCalendarEventsAction.java | 5 ++--- .../ml/action/TransportPostDataAction.java | 4 ++-- .../TransportPreviewDatafeedAction.java | 4 +++- .../ml/action/TransportPutCalendarAction.java | 7 +++--- .../ml/action/TransportPutFilterAction.java | 10 ++++----- .../action/TransportStopDatafeedAction.java | 4 +++- .../TransportUpdateCalendarJobAction.java | 5 ++--- .../TransportUpdateModelSnapshotAction.java | 5 ++--- .../action/TransportUpdateProcessAction.java | 7 +++--- .../TransportValidateDetectorAction.java | 10 ++++----- .../TransportValidateJobConfigAction.java | 10 ++++----- .../action/TransportMonitoringBulkAction.java | 4 +++- .../action/TransportGetRollupCapsAction.java | 5 ++--- .../action/TransportGetRollupJobAction.java | 4 ++-- .../action/TransportRollupSearchAction.java | 4 ++-- .../action/TransportStartRollupAction.java | 4 ++-- .../action/TransportStopRollupAction.java | 4 ++-- .../role/TransportDeleteRoleAction.java | 5 ++--- .../action/role/TransportGetRolesAction.java | 5 ++--- .../action/role/TransportPutRoleAction.java | 5 ++--- .../TransportDeleteRoleMappingAction.java | 5 ++--- .../TransportGetRoleMappingsAction.java | 13 +++++------ .../TransportPutRoleMappingAction.java | 5 ++--- .../saml/TransportSamlAuthenticateAction.java | 4 +++- .../TransportSamlInvalidateSessionAction.java | 5 ++--- .../saml/TransportSamlLogoutAction.java | 5 ++--- ...nsportSamlPrepareAuthenticationAction.java | 9 ++++---- .../token/TransportCreateTokenAction.java | 4 +++- .../token/TransportInvalidateTokenAction.java | 5 ++--- .../token/TransportRefreshTokenAction.java | 5 ++--- .../user/TransportAuthenticateAction.java | 9 ++++---- .../user/TransportChangePasswordAction.java | 5 ++--- .../user/TransportDeleteUserAction.java | 9 ++++---- .../action/user/TransportGetUsersAction.java | 5 ++--- .../user/TransportHasPrivilegesAction.java | 4 +++- .../action/user/TransportPutUserAction.java | 5 ++--- .../user/TransportSetEnabledAction.java | 4 +++- .../role/TransportDeleteRoleActionTests.java | 7 +++--- .../role/TransportGetRolesActionTests.java | 9 ++++---- .../role/TransportPutRoleActionTests.java | 10 +++------ .../TransportGetRoleMappingsActionTests.java | 16 ++++++-------- .../TransportPutRoleMappingActionTests.java | 16 ++++++-------- ...sportSamlInvalidateSessionActionTests.java | 3 +-- .../saml/TransportSamlLogoutActionTests.java | 2 +- .../TransportAuthenticateActionTests.java | 7 +++--- 
.../TransportChangePasswordActionTests.java | 9 ++++---- .../user/TransportDeleteUserActionTests.java | 16 ++++++-------- .../user/TransportGetUsersActionTests.java | 12 +++++----- .../user/TransportPutUserActionTests.java | 12 +++++----- .../plugin/TransportSqlClearCursorAction.java | 5 ++--- .../sql/plugin/TransportSqlQueryAction.java | 5 ++--- .../plugin/TransportSqlTranslateAction.java | 9 +++----- .../actions/WatcherTransportAction.java | 4 ++-- .../actions/ack/TransportAckWatchAction.java | 5 ++--- .../TransportActivateWatchAction.java | 8 +++---- .../delete/TransportDeleteWatchAction.java | 10 ++++----- .../execute/TransportExecuteWatchAction.java | 4 +++- .../actions/get/TransportGetWatchAction.java | 5 ++--- .../actions/put/TransportPutWatchAction.java | 4 +++- .../ack/TransportAckWatchActionTests.java | 2 +- 146 files changed, 415 insertions(+), 483 deletions(-) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java index 0c1065ad13145..0f6748b5e826c 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java @@ -19,8 +19,8 @@ package org.elasticsearch.plugin.noop.action.bulk; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; @@ -30,7 +30,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; public class TransportNoopBulkAction extends HandledTransportAction { @@ -38,9 +37,8 @@ public class TransportNoopBulkAction extends HandledTransportAction { @Inject - public TransportNoopSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters) { - super(settings, NoopSearchAction.NAME, threadPool, transportService, actionFilters, - (Writeable.Reader) SearchRequest::new); + public TransportNoopSearchAction(Settings settings, TransportService transportService, ActionFilters actionFilters) { + super(settings, NoopSearchAction.NAME, transportService, actionFilters, (Writeable.Reader) SearchRequest::new); } @Override diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 77ad363b50680..85a8f5e48079c 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -41,7 +41,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestBuilderListener; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import 
java.io.IOException; @@ -114,9 +113,8 @@ public void writeTo(StreamOutput out) throws IOException { public static class TransportAction extends HandledTransportAction { @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters) { - super(settings, NAME, threadPool, transportService, actionFilters, Request::new); + public TransportAction(Settings settings, TransportService transportService, ActionFilters actionFilters) { + super(settings, NAME, transportService, actionFilters, Request::new); } @Override diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java index 7451c89cdb494..79fea3c6d62cd 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; @@ -45,10 +44,10 @@ public class TransportMultiSearchTemplateAction extends HandledTransportAction) SearchTemplateRequest::new); + public TransportSearchTemplateAction(Settings settings, TransportService transportService, ActionFilters actionFilters, + ScriptService scriptService, NamedXContentRegistry xContentRegistry, NodeClient client) { + super(settings, SearchTemplateAction.NAME, transportService, actionFilters, + (Supplier) SearchTemplateRequest::new); this.scriptService = scriptService; this.xContentRegistry = xContentRegistry; this.client = client; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index ea0664b2aa446..1bfd013b0d5a5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -48,7 +48,6 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -280,9 +279,9 @@ public static class TransportAction extends HandledTransportAction) RankEvalRequest::new); this.scriptService = scriptService; this.namedXContentRegistry = namedXContentRegistry; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index 35aa8d77d104e..9be54f4f76104 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.reindex; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.HandledTransportAction; @@ -35,7 +33,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.function.Supplier; + public class TransportDeleteByQueryAction extends HandledTransportAction { + + private final ThreadPool threadPool; private final Client client; private final ScriptService scriptService; private final ClusterService clusterService; @@ -43,8 +45,9 @@ public class TransportDeleteByQueryAction extends HandledTransportAction) DeleteByQueryRequest::new); + this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; this.clusterService = clusterService; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 3db3a0d2a9123..62be1e2cb613a 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -92,6 +92,7 @@ public class TransportReindexAction extends HandledTransportAction> REMOTE_CLUSTER_WHITELIST = Setting.listSetting("reindex.remote.whitelist", emptyList(), Function.identity(), Property.NodeScope); + private final ThreadPool threadPool; private final ClusterService clusterService; private final ScriptService scriptService; private final AutoCreateIndex autoCreateIndex; @@ -103,8 +104,8 @@ public class TransportReindexAction extends HandledTransportAction { + + private final ThreadPool threadPool; private final Client client; private final ScriptService scriptService; private final ClusterService clusterService; @@ -53,8 +55,9 @@ public class TransportUpdateByQueryAction extends HandledTransportAction) UpdateByQueryRequest::new); + this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; this.clusterService = clusterService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index 3bd451538f0a3..b99630dd4f960 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -62,9 +62,9 @@ public class TransportCancelTasksAction extends TransportTasksAction */ public class TransportGetTaskAction extends HandledTransportAction { + private final ThreadPool threadPool; private final ClusterService clusterService; private final TransportService transportService; private final Client client; @@ -72,7 +73,8 @@ public class TransportGetTaskAction extends HandledTransportAction) RemoteInfoRequest::new); this.remoteClusterService = searchTransportService.getRemoteClusterService(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index eda82fb710ca0..4609f048caa83 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -49,10 
+49,10 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc private final IndicesService indicesService; @Inject - public TransportClearIndicesCacheAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportClearIndicesCacheAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, ClearIndicesCacheAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, ClearIndicesCacheAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, ClearIndicesCacheRequest::new, ThreadPool.Names.MANAGEMENT, false); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index 91755388320a3..7df54c1f123a1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.List; @@ -39,11 +38,10 @@ public class TransportFlushAction extends TransportBroadcastReplicationAction { @Inject - public TransportFlushAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, + public TransportFlushAction(Settings settings, ClusterService clusterService, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportShardFlushAction replicatedFlushAction) { - super(FlushAction.NAME, FlushRequest::new, settings, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedFlushAction); + super(FlushAction.NAME, FlushRequest::new, settings, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedFlushAction); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java index fb4928ab0d4d3..9762fe6cbb814 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.flush.SyncedFlushService; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; /** @@ -38,9 +37,9 @@ public class TransportSyncedFlushAction extends HandledTransportAction) SyncedFlushRequest::new); this.syncedFlushService = syncedFlushService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java index 94f27a93624d5..94357575a9f72 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java @@ -48,10 +48,10 @@ public class TransportForceMergeAction extends TransportBroadcastByNodeAction { @Inject - public TransportRefreshAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportRefreshAction(Settings settings, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportShardRefreshAction shardRefreshAction) { - super(RefreshAction.NAME, RefreshRequest::new, settings, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, shardRefreshAction); + super(RefreshAction.NAME, RefreshRequest::new, settings, clusterService, transportService, actionFilters, indexNameExpressionResolver, shardRefreshAction); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java index 94b12c9ab17d5..6b624e6baa792 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java @@ -46,9 +46,9 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastByNodeActi private final IndicesService indicesService; @Inject - public TransportIndicesSegmentsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, + public TransportIndicesSegmentsAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, IndicesSegmentsAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + super(settings, IndicesSegmentsAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, IndicesSegmentsRequest::new, ThreadPool.Names.MANAGEMENT); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 6f2aaa063011f..9668a1a41fac5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -47,10 +47,10 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction< private final IndicesService indicesService; @Inject - public TransportIndicesStatsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportIndicesStatsAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, 
IndicesStatsAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + super(settings, IndicesStatsAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, IndicesStatsRequest::new, ThreadPool.Names.MANAGEMENT); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java index 19566acaf7af4..603b25f6ab414 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java @@ -48,9 +48,9 @@ public class TransportUpgradeStatusAction extends TransportBroadcastByNodeAction private final IndicesService indicesService; @Inject - public TransportUpgradeStatusAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, + public TransportUpgradeStatusAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, UpgradeStatusAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + super(settings, UpgradeStatusAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, UpgradeStatusRequest::new, ThreadPool.Names.MANAGEMENT); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index 0bc2134cb505a..dda4a5203ff68 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -62,10 +62,10 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction { + private final ThreadPool threadPool; private final AutoCreateIndex autoCreateIndex; private final ClusterService clusterService; private final IngestService ingestService; @@ -108,8 +109,9 @@ public TransportBulkAction(Settings settings, ThreadPool threadPool, TransportSe TransportShardBulkAction shardBulkAction, NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, AutoCreateIndex autoCreateIndex, LongSupplier relativeTimeProvider) { - super(settings, BulkAction.NAME, threadPool, transportService, actionFilters, BulkRequest::new); + super(settings, BulkAction.NAME, transportService, actionFilters, BulkRequest::new); Objects.requireNonNull(relativeTimeProvider); + this.threadPool = threadPool; this.clusterService = clusterService; this.ingestService = ingestService; this.shardBulkAction = shardBulkAction; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index a1f0965d110b2..7fc58b667c579 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ 
-76,6 +76,7 @@ public class TransportShardBulkAction extends TransportWriteAction { + private final ThreadPool threadPool; private final ClusterService clusterService; private final TransportFieldCapabilitiesIndexAction shardAction; private final RemoteClusterService remoteClusterService; @@ -53,7 +54,8 @@ public TransportFieldCapabilitiesAction(Settings settings, TransportService tran ClusterService clusterService, ThreadPool threadPool, TransportFieldCapabilitiesIndexAction shardAction, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, FieldCapabilitiesAction.NAME, threadPool, transportService, actionFilters, FieldCapabilitiesRequest::new); + super(settings, FieldCapabilitiesAction.NAME, transportService, actionFilters, FieldCapabilitiesRequest::new); + this.threadPool = threadPool; this.clusterService = clusterService; this.remoteClusterService = transportService.getRemoteClusterService(); this.shardAction = shardAction; diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java index 0e54539d885c4..f7ad0f6c87fd0 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -44,10 +43,10 @@ public class TransportMultiGetAction extends HandledTransportAction) SimulatePipelineRequest::new); this.pipelineStore = nodeService.getIngestService().getPipelineStore(); this.executionService = new SimulateExecutionService(threadPool); diff --git a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java index 18e704be69c24..48612a68901dd 100644 --- a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java +++ b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; public class TransportMainAction extends HandledTransportAction { @@ -38,9 +37,9 @@ public class TransportMainAction extends HandledTransportAction { @@ -34,10 +33,10 @@ public class TransportClearScrollAction extends HandledTransportAction { private final int availableProcessors; + private final ThreadPool threadPool; private final ClusterService clusterService; private final LongSupplier relativeTimeProvider; private final NodeClient client; @@ -49,7 +50,8 @@ public class TransportMultiSearchAction extends HandledTransportAction SHARD_COUNT_LIMIT_SETTING = Setting.longSetting( "action.search.shard_count.limit", Long.MAX_VALUE, 1L, Property.Dynamic, Property.NodeScope); + private final ThreadPool threadPool; private final ClusterService clusterService; private final SearchTransportService searchTransportService; private final RemoteClusterService remoteClusterService; @@ -82,8 +83,8 @@ public TransportSearchAction(Settings settings, ThreadPool threadPool, Transport 
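// ---------------------------------------------------------------------------
// [Editor's sketch -- not part of the patch] For the many actions in this
// series that never used the pool (TransportFlushAction, TransportMainAction,
// TransportMultiGetAction, ...), the change is purely subtractive: the
// ThreadPool constructor parameter and the matching super() argument simply
// disappear. A constructor fragment, roughly, under the slimmed-down
// HandledTransportAction signature shown in these hunks; TransportFooAction,
// FooAction and the clusterService field are hypothetical stand-ins, not
// classes from this patch.
@Inject
public TransportFooAction(Settings settings, TransportService transportService,
                          ActionFilters actionFilters, ClusterService clusterService) {
    // no ThreadPool parameter and no ThreadPool argument to super(...)
    super(settings, FooAction.NAME, transportService, actionFilters, FooAction.Request::new);
    this.clusterService = clusterService;   // remaining dependencies are wired as before
}
// ---------------------------------------------------------------------------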
SearchTransportService searchTransportService, SearchPhaseController searchPhaseController, ClusterService clusterService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, SearchAction.NAME, threadPool, transportService, actionFilters, - (Writeable.Reader) SearchRequest::new); + super(settings, SearchAction.NAME, transportService, actionFilters, (Writeable.Reader) SearchRequest::new); + this.threadPool = threadPool; this.searchPhaseController = searchPhaseController; this.searchTransportService = searchTransportService; this.remoteClusterService = searchTransportService.getRemoteClusterService(); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 77425ecd5dbb2..953152eaad003 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import static org.elasticsearch.action.search.ParsedScrollId.QUERY_AND_FETCH_TYPE; @@ -41,10 +40,10 @@ public class TransportSearchScrollAction extends HandledTransportAction) SearchScrollRequest::new); this.clusterService = clusterService; this.searchTransportService = searchTransportService; diff --git a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index d6febf828765b..7cdcd017b9946 100644 --- a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -37,29 +37,27 @@ */ public abstract class HandledTransportAction extends TransportAction { - protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, - Supplier request) { - this(settings, actionName, true, threadPool, transportService, actionFilters, request); + protected HandledTransportAction(Settings settings, String actionName, TransportService transportService, + ActionFilters actionFilters, Supplier request) { + this(settings, actionName, true, transportService, actionFilters, request); } - protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, + protected HandledTransportAction(Settings settings, String actionName, TransportService transportService, ActionFilters actionFilters, Writeable.Reader requestReader) { - this(settings, actionName, true, threadPool, transportService, actionFilters, requestReader); + this(settings, actionName, true, transportService, actionFilters, requestReader); } - protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - Supplier request) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, + TransportService 
transportService, ActionFilters actionFilters, Supplier request) { + super(settings, actionName, actionFilters, transportService.getTaskManager()); transportService.registerRequestHandler(actionName, request, ThreadPool.Names.SAME, false, canTripCircuitBreaker, new TransportHandler()); } - protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, ThreadPool threadPool, + protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, TransportService transportService, ActionFilters actionFilters, Writeable.Reader requestReader) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + super(settings, actionName, actionFilters, transportService.getTaskManager()); transportService.registerRequestHandler(actionName, ThreadPool.Names.SAME, false, canTripCircuitBreaker, requestReader, new TransportHandler()); } diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java index c8d9849c2e58a..85167cfe0f8e9 100644 --- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -29,21 +29,17 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskListener; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.threadpool.ThreadPool; import java.util.concurrent.atomic.AtomicInteger; public abstract class TransportAction extends AbstractComponent { - protected final ThreadPool threadPool; protected final String actionName; private final ActionFilter[] filters; protected final TaskManager taskManager; - protected TransportAction(Settings settings, String actionName, ThreadPool threadPool, ActionFilters actionFilters, - TaskManager taskManager) { + protected TransportAction(Settings settings, String actionName, ActionFilters actionFilters, TaskManager taskManager) { super(settings); - this.threadPool = threadPool; this.actionName = actionName; this.filters = actionFilters.filters(); this.taskManager = taskManager; diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index ff8012f8e37fb..8a28c2c9d891d 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -58,10 +58,10 @@ public abstract class TransportBroadcastAction request, Supplier shardRequest, String shardExecutor) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; this.indexNameExpressionResolver = indexNameExpressionResolver; diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index ca50e2acd147e..dac1a55b6361f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java 
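// ---------------------------------------------------------------------------
// [Editor's sketch -- not part of the patch] A minimal, self-contained sketch
// of the pattern this change establishes, based only on the signatures visible
// in the TransportAction/HandledTransportAction hunks above: the base classes
// no longer take or expose a ThreadPool, so a concrete action that still needs
// to fork work injects the pool itself and keeps it in its own field.
// TransportExampleAction, ExampleRequest, ExampleResponse, the action name and
// the choice of the GENERIC executor are illustrative assumptions only.

import java.util.function.Supplier;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

public class TransportExampleAction extends HandledTransportAction<ExampleRequest, ExampleResponse> {

    private final ThreadPool threadPool; // no longer provided by TransportAction

    @Inject
    public TransportExampleAction(Settings settings, ThreadPool threadPool, TransportService transportService,
                                  ActionFilters actionFilters) {
        // new, slimmer super constructor: no ThreadPool parameter
        super(settings, "indices:data/read/example", transportService, actionFilters,
                (Supplier<ExampleRequest>) ExampleRequest::new);
        this.threadPool = threadPool;
    }

    @Override
    protected void doExecute(ExampleRequest request, ActionListener<ExampleResponse> listener) {
        // forking is now an explicit decision of the concrete action
        threadPool.executor(ThreadPool.Names.GENERIC)
                .execute(() -> listener.onResponse(new ExampleResponse()));
    }
}
// ---------------------------------------------------------------------------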
@@ -88,21 +88,18 @@ public abstract class TransportBroadcastByNodeAction request, String executor) { - this(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, request, - executor, true); + this(settings, actionName, clusterService, transportService, actionFilters, indexNameExpressionResolver, request, executor, true); } public TransportBroadcastByNodeAction( Settings settings, String actionName, - ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, @@ -110,8 +107,7 @@ public TransportBroadcastByNodeAction( Supplier request, String executor, boolean canTripCircuitBreaker) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, - request); + super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 1881db0f13e42..8f198c4b82e6f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -54,6 +54,7 @@ * A base class for operations that needs to be performed on the master node. */ public abstract class TransportMasterNodeAction, Response extends ActionResponse> extends HandledTransportAction { + protected final ThreadPool threadPool; protected final TransportService transportService; protected final ClusterService clusterService; protected final IndexNameExpressionResolver indexNameExpressionResolver; @@ -75,10 +76,10 @@ protected TransportMasterNodeAction(Settings settings, String actionName, Transp protected TransportMasterNodeAction(Settings settings, String actionName, boolean canTripCircuitBreaker, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier request) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, - request); + super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request); this.transportService = transportService; this.clusterService = clusterService; + this.threadPool = threadPool; this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); } @@ -87,10 +88,10 @@ protected TransportMasterNodeAction(Settings settings, String actionName, boolea TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, Writeable.Reader request, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request - ); + super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request); this.transportService = transportService; this.clusterService = clusterService; + this.threadPool = threadPool; this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); } diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java 
b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index d47e156680e28..7a074c91c7152 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -54,6 +54,7 @@ public abstract class TransportNodesAction extends HandledTransportAction { + protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final TransportService transportService; protected final Class nodeResponseClass; @@ -64,7 +65,8 @@ protected TransportNodesAction(Settings settings, String actionName, ThreadPool ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, Supplier request, Supplier nodeRequest, String nodeExecutor, Class nodeResponseClass) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); + this.threadPool = threadPool; this.clusterService = Objects.requireNonNull(clusterService); this.transportService = Objects.requireNonNull(transportService); this.nodeResponseClass = Objects.requireNonNull(nodeResponseClass); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index d3d54880f504f..50e0cc3af7f7b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -38,7 +38,6 @@ import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; @@ -58,10 +57,10 @@ public abstract class TransportBroadcastReplicationAction request, Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportBroadcastReplicationAction(String name, Supplier request, Settings settings, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportReplicationAction replicatedBroadcastShardAction) { - super(settings, name, threadPool, transportService, actionFilters, request); + super(settings, name, transportService, actionFilters, request); this.replicatedBroadcastShardAction = replicatedBroadcastShardAction; this.clusterService = clusterService; this.indexNameExpressionResolver = indexNameExpressionResolver; diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 97f985806168b..d7c908bf9fa5b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -100,6 +100,7 @@ public abstract class TransportReplicationAction< Response extends ReplicationResponse > extends TransportAction { + protected final ThreadPool threadPool; protected final TransportService transportService; protected final ClusterService 
clusterService; protected final ShardStateAction shardStateAction; @@ -132,7 +133,8 @@ protected TransportReplicationAction(Settings settings, String actionName, Trans IndexNameExpressionResolver indexNameExpressionResolver, Supplier request, Supplier replicaRequest, String executor, boolean syncGlobalCheckpointAfterOperation) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + super(settings, actionName, actionFilters, transportService.getTaskManager()); + this.threadPool = threadPool; this.transportService = transportService; this.clusterService = clusterService; this.indicesService = indicesService; diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index c907c12ac5161..280a35207a9db 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -50,6 +50,8 @@ public abstract class TransportInstanceSingleOperationAction, Response extends ActionResponse> extends HandledTransportAction { + + protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final TransportService transportService; protected final IndexNameExpressionResolver indexNameExpressionResolver; @@ -60,7 +62,8 @@ public abstract class TransportInstanceSingleOperationAction request) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); + this.threadPool = threadPool; this.clusterService = clusterService; this.transportService = transportService; this.indexNameExpressionResolver = indexNameExpressionResolver; diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index 6c5d55c8c4404..d7e5633559d8a 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -60,6 +60,7 @@ */ public abstract class TransportSingleShardAction, Response extends ActionResponse> extends TransportAction { + protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final TransportService transportService; protected final IndexNameExpressionResolver indexNameExpressionResolver; @@ -70,7 +71,8 @@ public abstract class TransportSingleShardAction request, String executor) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + super(settings, actionName, actionFilters, transportService.getTaskManager()); + this.threadPool = threadPool; this.clusterService = clusterService; this.transportService = transportService; this.indexNameExpressionResolver = indexNameExpressionResolver; diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index f852b5efb1aa3..5599dd5f98b06 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -77,10 +77,10 @@ public abstract class TransportTasksAction< protected final String transportNodeAction; - protected TransportTasksAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, + protected TransportTasksAction(Settings settings, String actionName, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, Supplier requestSupplier, Supplier responseSupplier, String nodeExecutor) { - super(settings, actionName, threadPool, transportService, actionFilters, requestSupplier); + super(settings, actionName, transportService, actionFilters, requestSupplier); this.clusterService = clusterService; this.transportService = transportService; this.transportNodeAction = actionName + "[n]"; diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index 9a3fc7b84c287..f66d843ea6db4 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -45,10 +44,10 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction { - protected FakeTransportAction(Settings settings, String actionName, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, TaskManager taskManager) { - super(settings, actionName, threadPool, actionFilters, taskManager); + protected FakeTransportAction(Settings settings, String actionName, ActionFilters actionFilters, TaskManager taskManager) { + super(settings, actionName, actionFilters, taskManager); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 4cb9cd27e7fc7..20f4987008c53 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -192,9 +192,8 @@ protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool clusterService = createClusterService(threadPool, discoveryNode.get()); clusterService.addStateApplier(transportService.getTaskManager()); ActionFilters actionFilters = new ActionFilters(emptySet()); - transportListTasksAction = new TransportListTasksAction(settings, threadPool, clusterService, transportService, actionFilters); - transportCancelTasksAction = new TransportCancelTasksAction(settings, threadPool, clusterService, - transportService, actionFilters); + transportListTasksAction = new TransportListTasksAction(settings, clusterService, transportService, actionFilters); + transportCancelTasksAction = new TransportCancelTasksAction(settings, clusterService, transportService, actionFilters); transportService.acceptIncomingRequests(); } diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index 0cfe532b8a012..a04c8d93c3a8c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -424,12 +424,9 @@ public static class TransportUnblockTestTasksAction extends TransportTasksAction UnblockTestTasksResponse, UnblockTestTaskResponse> { @Inject - public TransportUnblockTestTasksAction(Settings settings,ThreadPool threadPool, ClusterService - clusterService, - TransportService transportService) { - super(settings, UnblockTestTasksAction.NAME, threadPool, clusterService, transportService, new ActionFilters(new - HashSet<>()), - UnblockTestTasksRequest::new, UnblockTestTasksResponse::new, ThreadPool.Names.MANAGEMENT); + public TransportUnblockTestTasksAction(Settings settings, ClusterService clusterService, TransportService transportService) { + super(settings, UnblockTestTasksAction.NAME, clusterService, transportService, new ActionFilters(new HashSet<>()), + UnblockTestTasksRequest::new, UnblockTestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 33b815e4fbf22..9175bc69bf642 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -254,9 +254,9 @@ public void writeTo(StreamOutput out) throws IOException { */ abstract static class TestTasksAction extends TransportTasksAction { - protected TestTasksAction(Settings settings, String actionName, ThreadPool threadPool, + protected TestTasksAction(Settings settings, String actionName, ClusterService clusterService, TransportService transportService) { - super(settings, actionName, threadPool, clusterService, transportService, new ActionFilters(new HashSet<>()), + super(settings, actionName, clusterService, transportService, new ActionFilters(new HashSet<>()), TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } @@ -622,7 +622,7 @@ public void testTaskLevelActionFailures() throws ExecutionException, Interrupted for (int i = 0; i < testNodes.length; i++) { final int node = i; // Simulate task action that fails on one of the tasks on one of the nodes - tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", threadPool, testNodes[i].clusterService, + tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", testNodes[i].clusterService, testNodes[i].transportService) { @Override protected void taskOperation(TestTasksRequest request, Task task, ActionListener listener) { @@ -701,7 +701,7 @@ public void testTaskNodeFiltering() throws ExecutionException, InterruptedExcept final int node = i; // Simulate a task action that works on all nodes except nodes listed in filterNodes. // We are testing that it works. 
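// ---------------------------------------------------------------------------
// [Editor's sketch -- not part of the patch] Test-side view of the same change,
// mirroring the TestTasksAction updates in this hunk: an anonymous tasks action
// is now constructed without a ThreadPool argument. This fragment assumes the
// surrounding TransportTasksActionTests fixture (CLUSTER_SETTINGS, testNodes)
// and assumes TestTaskResponse as the per-task response type; the body of
// taskOperation is a placeholder, not the behaviour of the real tests.
TestTasksAction action = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction",
        testNodes[0].clusterService, testNodes[0].transportService) {
    @Override
    protected void taskOperation(TestTasksRequest request, Task task, ActionListener<TestTaskResponse> listener) {
        listener.onFailure(new UnsupportedOperationException("illustration only"));
    }
};
// ---------------------------------------------------------------------------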
- tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", threadPool, + tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", testNodes[i].clusterService, testNodes[i].transportService) { @Override diff --git a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java index 1c1c0f9476de3..654a4a3649c35 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.Collections; @@ -68,8 +67,7 @@ public void testMainActionClusterAvailable() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportMainAction action = new TransportMainAction(settings, mock(ThreadPool.class), transportService, mock(ActionFilters.class), - clusterService); + TransportMainAction action = new TransportMainAction(settings, transportService, mock(ActionFilters.class), clusterService); AtomicReference responseRef = new AtomicReference<>(); action.doExecute(new MainRequest(), new ActionListener() { @Override diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 3a31422dcf83f..479ed2ad60d51 100644 --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -80,7 +80,7 @@ public void testActionFiltersRequest() throws ExecutionException, InterruptedExc String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = - new TransportAction(Settings.EMPTY, actionName, null, actionFilters, + new TransportAction(Settings.EMPTY, actionName, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override protected void doExecute(TestRequest request, ActionListener listener) { @@ -158,7 +158,7 @@ public void exe String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = new TransportAction(Settings.EMPTY, - actionName, null, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { + actionName, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 6a7d443553888..61beb59bc0c24 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ 
-118,7 +118,7 @@ class TestTransportBroadcastByNodeAction extends TransportBroadcastByNodeAction< private final Map shards = new HashMap<>(); TestTransportBroadcastByNodeAction(Settings settings, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier request, String executor) { - super(settings, "indices:admin/test", THREAD_POOL, TransportBroadcastByNodeActionTests.this.clusterService, transportService, actionFilters, indexNameExpressionResolver, request, executor); + super(settings, "indices:admin/test", TransportBroadcastByNodeActionTests.this.clusterService, transportService, actionFilters, indexNameExpressionResolver, request, executor); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index f3033b017db98..bfa45bb072dcf 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -100,7 +100,7 @@ threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService, new NamedWr TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet()); transportService.start(); transportService.acceptIncomingRequests(); - broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, threadPool, clusterService, transportService, + broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, clusterService, transportService, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), null); } @@ -206,10 +206,10 @@ public void testShardsList() throws InterruptedException, ExecutionException { private class TestBroadcastReplicationAction extends TransportBroadcastReplicationAction { protected final Set>> capturedShardRequests = ConcurrentCollections.newConcurrentSet(); - TestBroadcastReplicationAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - TransportReplicationAction replicatedBroadcastShardAction) { - super("test-broadcast-replication-action", DummyBroadcastRequest::new, settings, threadPool, clusterService, transportService, + TestBroadcastReplicationAction(Settings settings, ClusterService clusterService, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + TransportReplicationAction replicatedBroadcastShardAction) { + super("test-broadcast-replication-action", DummyBroadcastRequest::new, settings, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction); } diff --git a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index a289e9680b4aa..f473188a5424b 100644 --- a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -59,7 +59,7 @@ private Actions(Settings settings, ThreadPool threadPool, Action[] actions) { private static class InternalTransportAction extends TransportAction { private 
InternalTransportAction(Settings settings, String actionName, ThreadPool threadPool) { - super(settings, actionName, threadPool, EMPTY_FILTERS, new TaskManager(settings, threadPool, Collections.emptySet())); + super(settings, actionName, EMPTY_FILTERS, new TaskManager(settings, threadPool, Collections.emptySet())); } @Override diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java index e54641bef2f54..745b883656958 100644 --- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java +++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java @@ -34,7 +34,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; @@ -511,10 +510,9 @@ public static class TransportTestTaskAction extends TransportTasksAction { @Inject - public TransportTestTaskAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, String nodeExecutor) { - super(settings, TestTaskAction.NAME, threadPool, clusterService, transportService, actionFilters, + public TransportTestTaskAction(Settings settings, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters) { + super(settings, TestTaskAction.NAME, clusterService, transportService, actionFilters, TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java index 7acbfa49368de..23dd0e12d44ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java @@ -10,15 +10,14 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackInfoResponse; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.license.XPackInfoResponse; +import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.license.XPackInfoResponse.LicenseInfo; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackBuild; import org.elasticsearch.xpack.core.XPackFeatureSet; -import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; -import org.elasticsearch.license.XPackInfoResponse.LicenseInfo; import java.util.Set; import java.util.stream.Collectors; @@ -29,9 +28,9 @@ public class TransportXPackInfoAction extends HandledTransportAction featureSets; @Inject - public TransportXPackInfoAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportXPackInfoAction(Settings settings, 
TransportService transportService, ActionFilters actionFilters, LicenseService licenseService, Set featureSets) { - super(settings, XPackInfoAction.NAME, threadPool, transportService, actionFilters, + super(settings, XPackInfoAction.NAME, transportService, actionFilters, XPackInfoRequest::new); this.licenseService = licenseService; this.featureSets = featureSets; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java index 16e2a74dac81a..a70d0693d5b37 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo; @@ -25,10 +24,9 @@ public class TransportGetCertificateInfoAction extends HandledTransportAction null, null, Collections.emptySet()); - TransportXPackInfoAction action = new TransportXPackInfoAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), licenseService, featureSets); + TransportXPackInfoAction action = new TransportXPackInfoAction(Settings.EMPTY, transportService, + mock(ActionFilters.class), licenseService, featureSets); License license = mock(License.class); long expiryDate = randomLong(); diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 1c1dfb476da7d..d45dd640a49ff 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -65,6 +65,7 @@ */ public class TransportGraphExploreAction extends HandledTransportAction { + private final ThreadPool threadPool; private final NodeClient client; protected final XPackLicenseState licenseState; @@ -83,10 +84,9 @@ protected boolean lessThan(Vertex a, Vertex b) { @Inject public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, NodeClient client, - TransportService transportService, ActionFilters actionFilters, - XPackLicenseState licenseState) { - super(settings, GraphExploreAction.NAME, threadPool, transportService, actionFilters, - (Supplier)GraphExploreRequest::new); + TransportService transportService, ActionFilters actionFilters, XPackLicenseState licenseState) { + super(settings, GraphExploreAction.NAME, transportService, actionFilters, (Supplier)GraphExploreRequest::new); + this.threadPool = threadPool; this.client = client; this.licenseState = licenseState; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java index bc1d50c7cd99d..05810b943befb 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -57,6 +57,7 @@ public class TransportCloseJobAction extends TransportTasksAction { + private final ThreadPool threadPool; private final Client client; private final ClusterService clusterService; private final Auditor auditor; @@ -67,8 +68,9 @@ public TransportCloseJobAction(Settings settings, TransportService transportServ ClusterService clusterService, Client client, Auditor auditor, PersistentTasksService persistentTasksService) { // We fork in innerTaskOperation(...), so we can use ThreadPool.Names.SAME here: - super(settings, CloseJobAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, CloseJobAction.NAME, clusterService, transportService, actionFilters, CloseJobAction.Request::new, CloseJobAction.Response::new, ThreadPool.Names.SAME); + this.threadPool = threadPool; this.client = client; this.clusterService = clusterService; this.auditor = auditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java index 0346e38deb2fa..38d88341ce3de 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; @@ -20,7 +18,6 @@ import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction; @@ -28,6 +25,8 @@ import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import java.util.function.Supplier; + import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -38,9 +37,9 @@ public class TransportDeleteCalendarAction extends HandledTransportAction) DeleteCalendarAction.Request::new); this.client = client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 2e4b688fa2619..7b2311eba2d2d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; 
import org.elasticsearch.xpack.core.ml.action.DeleteCalendarEventAction; @@ -41,10 +40,9 @@ public class TransportDeleteCalendarEventAction extends HandledTransportAction { + private final ThreadPool threadPool; private final Client client; private final ClusterService clusterService; @Inject public TransportDeleteExpiredDataAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, Client client, ClusterService clusterService) { - super(settings, DeleteExpiredDataAction.NAME, threadPool, transportService, actionFilters, DeleteExpiredDataAction.Request::new); + super(settings, DeleteExpiredDataAction.NAME, transportService, actionFilters, DeleteExpiredDataAction.Request::new); + this.threadPool = threadPool; this.client = ClientHelper.clientWithOrigin(client, ClientHelper.ML_ORIGIN); this.clusterService = clusterService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java index 4987c028696c6..79693e2279486 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java @@ -20,11 +20,10 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.DeleteFilterAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.action.DeleteFilterAction; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; @@ -44,9 +43,9 @@ public class TransportDeleteFilterAction extends HandledTransportAction) DeleteFilterAction.Request::new); this.clusterService = clusterService; this.client = client; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java index 23ca3693df632..ad22f84f6d468 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -37,10 +36,9 @@ public class TransportDeleteModelSnapshotAction extends HandledTransportAction { @Inject - public TransportFlushJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportFlushJobAction(Settings settings, TransportService transportService, ClusterService clusterService, ActionFilters actionFilters, AutodetectProcessManager processManager) { - super(settings, FlushJobAction.NAME, 
threadPool, clusterService, transportService, actionFilters, + super(settings, FlushJobAction.NAME, clusterService, transportService, actionFilters, FlushJobAction.Request::new, FlushJobAction.Response::new, ThreadPool.Names.SAME, processManager); // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java index 8b287db50381c..f42f7003b909c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java @@ -42,10 +42,10 @@ public class TransportForecastJobAction extends TransportJobTaskAction { private final JobProvider jobProvider; @@ -27,9 +26,9 @@ public class TransportGetBucketsAction extends HandledTransportAction) GetBucketsAction.Request::new); this.jobProvider = jobProvider; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java index da2d2d7970fc6..5647c72d44bd2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.GetCalendarEventsAction; @@ -20,9 +19,9 @@ import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.persistence.ScheduledEventsQueryBuilder; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.ScheduledEventsQueryBuilder; import java.util.Collections; import java.util.List; @@ -35,9 +34,9 @@ public class TransportGetCalendarEventsAction extends HandledTransportAction) GetCalendarEventsAction.Request::new); this.jobProvider = jobProvider; this.clusterService = clusterService; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java index 5645d1e1f2d26..46c252004a3c5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetCalendarsAction; import 
org.elasticsearch.xpack.core.ml.action.util.PageParams; @@ -26,9 +25,9 @@ public class TransportGetCalendarsAction extends HandledTransportAction { private final JobProvider jobProvider; @@ -26,9 +25,9 @@ public class TransportGetCategoriesAction extends HandledTransportAction) GetCategoriesAction.Request::new); this.jobProvider = jobProvider; this.client = client; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index 28d75956df059..1be7be4a5d2b3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; @@ -49,9 +48,9 @@ public class TransportGetFiltersAction extends HandledTransportAction { private final JobProvider jobProvider; @@ -27,9 +26,9 @@ public class TransportGetInfluencersAction extends HandledTransportAction) GetInfluencersAction.Request::new); this.jobProvider = jobProvider; this.client = client; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index c0b383b55ced0..1182953dfc31e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -53,10 +53,10 @@ public class TransportGetJobsStatsAction extends TransportTasksAction) GetOverallBucketsAction.Request::new); + this.threadPool = threadPool; this.clusterService = clusterService; this.client = client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java index 7c5fee97d5647..6943cd9a01c5e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java @@ -5,20 +5,19 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; -import org.elasticsearch.xpack.ml.job.persistence.RecordsQueryBuilder; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.RecordsQueryBuilder; + +import 
java.util.function.Supplier; public class TransportGetRecordsAction extends HandledTransportAction { @@ -27,9 +26,9 @@ public class TransportGetRecordsAction extends HandledTransportAction) GetRecordsAction.Request::new); this.jobProvider = jobProvider; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java index 0d3b8dfa38dbe..398a1007ff9c4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java @@ -18,12 +18,11 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.IsolateDatafeedAction; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -33,9 +32,9 @@ public class TransportIsolateDatafeedAction extends TransportTasksAction { @Inject - public TransportIsolateDatafeedAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportIsolateDatafeedAction(Settings settings, TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(settings, IsolateDatafeedAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, IsolateDatafeedAction.NAME, clusterService, transportService, actionFilters, IsolateDatafeedAction.Request::new, IsolateDatafeedAction.Response::new, MachineLearning.UTILITY_THREAD_POOL_NAME); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java index 881a5e7cc5b4b..bd489588da3c3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java @@ -15,13 +15,12 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.JobTaskRequest; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; @@ -39,11 +38,11 @@ public abstract class TransportJobTaskAction requestSupplier, Supplier responseSupplier, String nodeExecutor, AutodetectProcessManager processManager) { - super(settings, actionName, threadPool, clusterService, transportService, 
actionFilters, + super(settings, actionName, clusterService, transportService, actionFilters, requestSupplier, responseSupplier, nodeExecutor); this.processManager = processManager; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java index cc2f70eadeae5..40cec95fae211 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java @@ -15,14 +15,13 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.KillProcessAction; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.notifications.Auditor; @@ -34,10 +33,10 @@ public class TransportKillProcessAction extends TransportJobTaskAction) MlInfoAction.Request::new); this.clusterService = clusterService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java index 1fbbb7a368152..926395d65132c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java @@ -29,9 +29,9 @@ public class TransportPersistJobAction extends TransportJobTaskAction { @Inject - public TransportPersistJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportPersistJobAction(Settings settings, TransportService transportService, ClusterService clusterService, ActionFilters actionFilters, AutodetectProcessManager processManager) { - super(settings, PersistJobAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, PersistJobAction.NAME, clusterService, transportService, actionFilters, PersistJobAction.Request::new, PersistJobAction.Response::new, ThreadPool.Names.SAME, processManager); // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java index d92b4a21564c4..49cb7dc45c954 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; @@ -44,9 +43,9 @@ public class TransportPostCalendarEventsAction extends HandledTransportAction { + private final ThreadPool threadPool; private final Client client; private final ClusterService clusterService; @Inject public TransportPreviewDatafeedAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, Client client, ClusterService clusterService) { - super(settings, PreviewDatafeedAction.NAME, threadPool, transportService, actionFilters, + super(settings, PreviewDatafeedAction.NAME, transportService, actionFilters, (Supplier) PreviewDatafeedAction.Request::new); + this.threadPool = threadPool; this.client = client; this.clusterService = clusterService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java index c135ab8322b05..82caa9a35a6c5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java @@ -20,10 +20,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -39,9 +38,9 @@ public class TransportPutCalendarAction extends HandledTransportAction) PutCalendarAction.Request::new); this.client = client; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index a8cd2cc8134a5..cb35daef8668c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -19,13 +19,12 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; -import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.job.JobManager; import java.io.IOException; import java.util.Collections; @@ -40,10 +39,9 @@ public class TransportPutFilterAction extends HandledTransportAction) PutFilterAction.Request::new); this.client = 
client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java index e7455053d525d..cf7350a870e97 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java @@ -48,14 +48,16 @@ public class TransportStopDatafeedAction extends TransportTasksAction { + private final ThreadPool threadPool; private final PersistentTasksService persistentTasksService; @Inject public TransportStopDatafeedAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, ClusterService clusterService, PersistentTasksService persistentTasksService) { - super(settings, StopDatafeedAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, StopDatafeedAction.NAME, clusterService, transportService, actionFilters, StopDatafeedAction.Request::new, StopDatafeedAction.Response::new, MachineLearning.UTILITY_THREAD_POOL_NAME); + this.threadPool = threadPool; this.persistentTasksService = persistentTasksService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java index 8a163d5dd0cd6..fd19c7483bc05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; @@ -26,9 +25,9 @@ public class TransportUpdateCalendarJobAction extends HandledTransportAction { @Inject - public TransportUpdateProcessAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ClusterService clusterService, ActionFilters actionFilters, - AutodetectProcessManager processManager) { - super(settings, UpdateProcessAction.NAME, threadPool, clusterService, transportService, actionFilters, + public TransportUpdateProcessAction(Settings settings, TransportService transportService, ClusterService clusterService, + ActionFilters actionFilters, AutodetectProcessManager processManager) { + super(settings, UpdateProcessAction.NAME, clusterService, transportService, actionFilters, UpdateProcessAction.Request::new, UpdateProcessAction.Response::new, ThreadPool.Names.SAME, processManager); // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java index 4ae159f794895..284ae505a01e4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java
@@ -5,24 +5,22 @@
  */
 package org.elasticsearch.xpack.ml.action;
 
-import java.util.function.Supplier;
-
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction;
 
+import java.util.function.Supplier;
+
 public class TransportValidateDetectorAction extends HandledTransportAction {
 
     @Inject
-    public TransportValidateDetectorAction(Settings settings, TransportService transportService, ThreadPool threadPool,
-                                           ActionFilters actionFilters) {
-        super(settings, ValidateDetectorAction.NAME, threadPool, transportService, actionFilters,
+    public TransportValidateDetectorAction(Settings settings, TransportService transportService, ActionFilters actionFilters) {
+        super(settings, ValidateDetectorAction.NAME, transportService, actionFilters,
             (Supplier) ValidateDetectorAction.Request::new);
     }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java
index 990c673a8c1ed..dc2a8155c4d94 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java
@@ -5,24 +5,22 @@
  */
 package org.elasticsearch.xpack.ml.action;
 
-import java.util.function.Supplier;
-
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction;
 
+import java.util.function.Supplier;
+
 public class TransportValidateJobConfigAction extends HandledTransportAction {
 
     @Inject
-    public TransportValidateJobConfigAction(Settings settings, TransportService transportService, ThreadPool threadPool,
-                                            ActionFilters actionFilters) {
-        super(settings, ValidateJobConfigAction.NAME, threadPool, transportService, actionFilters,
+    public TransportValidateJobConfigAction(Settings settings, TransportService transportService, ActionFilters actionFilters) {
+        super(settings, ValidateJobConfigAction.NAME, transportService, actionFilters,
             (Supplier< ValidateJobConfigAction.Request>) ValidateJobConfigAction.Request::new);
     }
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java
index b30d8b357c196..3ec30552a00b5 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java
@@ -34,6 +34,7 @@ public class TransportMonitoringBulkAction
extends HandledTransportAction { + private final ThreadPool threadPool; private final ClusterService clusterService; private final Exporters exportService; private final MonitoringService monitoringService; @@ -42,7 +43,8 @@ public class TransportMonitoringBulkAction extends HandledTransportAction) GetRollupCapsAction.Request::new); this.clusterService = clusterService; } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java index b0adf6f12b486..a72dbfbe6b94f 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java @@ -38,9 +38,9 @@ public class TransportGetRollupJobAction extends TransportTasksAction { @Inject - public TransportGetRollupJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportGetRollupJobAction(Settings settings, TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(settings, GetRollupJobsAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, GetRollupJobsAction.NAME, clusterService, transportService, actionFilters, GetRollupJobsAction.Request::new, GetRollupJobsAction.Response::new, ThreadPool.Names.SAME); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 48fb19bbe1552..a9f3dc5a1b786 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -84,10 +84,10 @@ public class TransportRollupSearchAction extends TransportAction { private final NativeRoleMappingStore roleMappingStore; @Inject - public TransportGetRoleMappingsAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + public TransportGetRoleMappingsAction(Settings settings, ActionFilters actionFilters, TransportService transportService, NativeRoleMappingStore nativeRoleMappingStore) { - super(settings, GetRoleMappingsAction.NAME, threadPool, transportService, actionFilters, + super(settings, GetRoleMappingsAction.NAME, transportService, actionFilters, GetRoleMappingsRequest::new); this.roleMappingStore = nativeRoleMappingStore; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index cd65017940554..8e72a7d76e6ef 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; 
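// Illustrative sketch, not part of the patch: the hunks around here all follow the
// same shape. HandledTransportAction subclasses no longer hand a ThreadPool to the
// base constructor; an action that still needs one (for example
// TransportMonitoringBulkAction, TransportSamlAuthenticateAction or
// TransportCreateTokenAction in this patch) keeps its own final field instead.
// TransportMyAction, MyAction, MyRequest and MyResponse are placeholder names only.
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

public class TransportMyAction extends HandledTransportAction<MyRequest, MyResponse> {

    // Retained locally because the base class no longer stores or needs a ThreadPool.
    private final ThreadPool threadPool;

    @Inject
    public TransportMyAction(Settings settings, ThreadPool threadPool, TransportService transportService,
                             ActionFilters actionFilters) {
        // New-style super call from this patch: no ThreadPool argument.
        super(settings, MyAction.NAME, transportService, actionFilters, MyRequest::new);
        this.threadPool = threadPool;
    }
}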
import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; @@ -23,9 +22,9 @@ public class TransportPutRoleMappingAction private final NativeRoleMappingStore roleMappingStore; @Inject - public TransportPutRoleMappingAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + public TransportPutRoleMappingAction(Settings settings, ActionFilters actionFilters, TransportService transportService, NativeRoleMappingStore roleMappingStore) { - super(settings, PutRoleMappingAction.NAME, threadPool, transportService, actionFilters, + super(settings, PutRoleMappingAction.NAME, transportService, actionFilters, PutRoleMappingRequest::new); this.roleMappingStore = roleMappingStore; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java index 2d43717b65ff3..3d0965b96aa9c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java @@ -31,6 +31,7 @@ */ public final class TransportSamlAuthenticateAction extends HandledTransportAction { + private final ThreadPool threadPool; private final AuthenticationService authenticationService; private final TokenService tokenService; @@ -38,7 +39,8 @@ public final class TransportSamlAuthenticateAction extends HandledTransportActio public TransportSamlAuthenticateAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, AuthenticationService authenticationService, TokenService tokenService) { - super(settings, SamlAuthenticateAction.NAME, threadPool, transportService, actionFilters, SamlAuthenticateRequest::new); + super(settings, SamlAuthenticateAction.NAME, transportService, actionFilters, SamlAuthenticateRequest::new); + this.threadPool = threadPool; this.authenticationService = authenticationService; this.tokenService = tokenService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java index f4a3d35376d21..778364bf5c1a5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionAction; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionRequest; @@ -47,9 +46,9 @@ public final class TransportSamlInvalidateSessionAction private final Realms realms; @Inject - public TransportSamlInvalidateSessionAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public 
TransportSamlInvalidateSessionAction(Settings settings, TransportService transportService, ActionFilters actionFilters, TokenService tokenService, Realms realms) { - super(settings, SamlInvalidateSessionAction.NAME, threadPool, transportService, actionFilters, SamlInvalidateSessionRequest::new); + super(settings, SamlInvalidateSessionAction.NAME, transportService, actionFilters, SamlInvalidateSessionRequest::new); this.tokenService = tokenService; this.realms = realms; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java index 11bc64e7f1839..43873c5bcadf4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutAction; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutRequest; @@ -41,9 +40,9 @@ public final class TransportSamlLogoutAction private final TokenService tokenService; @Inject - public TransportSamlLogoutAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportSamlLogoutAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Realms realms, TokenService tokenService) { - super(settings, SamlLogoutAction.NAME, threadPool, transportService, actionFilters, SamlLogoutRequest::new); + super(settings, SamlLogoutAction.NAME, transportService, actionFilters, SamlLogoutRequest::new); this.realms = realms; this.tokenService = tokenService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java index 9d1619cdd5579..58eb5ccc59ce1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java @@ -5,15 +5,12 @@ */ package org.elasticsearch.xpack.security.action.saml; -import java.util.List; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationAction; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationRequest; @@ -24,6 +21,8 @@ import org.elasticsearch.xpack.security.authc.saml.SamlUtils; import org.opensaml.saml.saml2.core.AuthnRequest; +import java.util.List; + import static 
org.elasticsearch.xpack.security.authc.saml.SamlRealm.findSamlRealms; /** @@ -35,9 +34,9 @@ public final class TransportSamlPrepareAuthenticationAction private final Realms realms; @Inject - public TransportSamlPrepareAuthenticationAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportSamlPrepareAuthenticationAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Realms realms) { - super(settings, SamlPrepareAuthenticationAction.NAME, threadPool, transportService, actionFilters, + super(settings, SamlPrepareAuthenticationAction.NAME, transportService, actionFilters, SamlPrepareAuthenticationRequest::new); this.realms = realms; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java index 9959f0c676e85..60d3086763a09 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java @@ -32,13 +32,15 @@ public final class TransportCreateTokenAction extends HandledTransportAction { private static final String DEFAULT_SCOPE = "full"; + private final ThreadPool threadPool; private final TokenService tokenService; private final AuthenticationService authenticationService; @Inject public TransportCreateTokenAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, TokenService tokenService, AuthenticationService authenticationService) { - super(settings, CreateTokenAction.NAME, threadPool, transportService, actionFilters, CreateTokenRequest::new); + super(settings, CreateTokenAction.NAME, transportService, actionFilters, CreateTokenRequest::new); + this.threadPool = threadPool; this.tokenService = tokenService; this.authenticationService = authenticationService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java index 1c70adfb8f995..7b280087d617b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest; @@ -25,9 +24,9 @@ public final class TransportInvalidateTokenAction extends HandledTransportAction private final TokenService tokenService; @Inject - public TransportInvalidateTokenAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportInvalidateTokenAction(Settings settings, TransportService transportService, ActionFilters actionFilters, TokenService tokenService) { - super(settings, InvalidateTokenAction.NAME, 
threadPool, transportService, actionFilters, + super(settings, InvalidateTokenAction.NAME, transportService, actionFilters, InvalidateTokenRequest::new); this.tokenService = tokenService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java index 761fa5e8349e1..601ee944dd82b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest; import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse; @@ -24,9 +23,9 @@ public class TransportRefreshTokenAction extends HandledTransportAction { private final SecurityContext securityContext; @Inject - public TransportAuthenticateAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportAuthenticateAction(Settings settings, TransportService transportService, ActionFilters actionFilters, SecurityContext securityContext) { - super(settings, AuthenticateAction.NAME, threadPool, transportService, actionFilters, + super(settings, AuthenticateAction.NAME, transportService, actionFilters, (Supplier) AuthenticateRequest::new); this.securityContext = securityContext; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 8f0256b7e7773..78b4ae0193655 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; @@ -25,9 +24,9 @@ public class TransportChangePasswordAction extends HandledTransportAction { private final NativeUsersStore usersStore; @Inject - public TransportDeleteUserAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + public TransportDeleteUserAction(Settings settings, ActionFilters actionFilters, NativeUsersStore usersStore, TransportService transportService) { - super(settings, DeleteUserAction.NAME, threadPool, transportService, actionFilters, + super(settings, DeleteUserAction.NAME, transportService, actionFilters, (Supplier) DeleteUserRequest::new); this.usersStore = usersStore; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index f40db20a339ef..49e8c9d96aba9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.GetUsersAction; import org.elasticsearch.xpack.core.security.action.user.GetUsersRequest; @@ -36,9 +35,9 @@ public class TransportGetUsersAction extends HandledTransportAction { + private final ThreadPool threadPool; private final AuthorizationService authorizationService; @Inject public TransportHasPrivilegesAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, AuthorizationService authorizationService) { - super(settings, HasPrivilegesAction.NAME, threadPool, transportService, actionFilters, HasPrivilegesRequest::new); + super(settings, HasPrivilegesAction.NAME, transportService, actionFilters, HasPrivilegesRequest::new); + this.threadPool = threadPool; this.authorizationService = authorizationService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java index f2b32e68a79ba..85411b0e75f89 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; @@ -28,9 +27,9 @@ public class TransportPutUserAction extends HandledTransportAction { + private final ThreadPool threadPool; private final NativeUsersStore usersStore; @Inject public TransportSetEnabledAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, NativeUsersStore usersStore) { - super(settings, SetEnabledAction.NAME, threadPool, transportService, actionFilters, SetEnabledRequest::new); + super(settings, SetEnabledAction.NAME, transportService, actionFilters, SetEnabledRequest::new); + this.threadPool = threadPool; this.usersStore = usersStore; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java index e7eb1fcc8d798..fba4afe47911e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse; @@ -44,7 +43,7 @@ public void testReservedRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); @@ -75,7 +74,7 @@ public void testValidRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); @@ -119,7 +118,7 @@ public void testException() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java index 9c19bf2097d22..27ae467c786db 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.GetRolesRequest; import org.elasticsearch.xpack.core.security.action.role.GetRolesResponse; @@ -44,7 +43,7 @@ public void testReservedRoles() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, 
x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); final int size = randomIntBetween(1, ReservedRolesStore.names().size()); @@ -90,7 +89,7 @@ public void testStoreRoles() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); @@ -142,7 +141,7 @@ public void testGetAllOrMix() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); final List expectedNames = new ArrayList<>(); @@ -206,7 +205,7 @@ public void testException() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java index 94a69cc044253..8392f92e0c31f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; @@ -45,8 +44,7 @@ public void testReservedRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new 
TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); @@ -76,8 +74,7 @@ public void testValidRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); final boolean created = randomBoolean(); PutRoleRequest request = new PutRoleRequest(); @@ -120,8 +117,7 @@ public void testException() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java index cc67a4facb0ed..ea6713bb85fe9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java @@ -5,19 +5,12 @@ */ package org.elasticsearch.xpack.security.action.rolemapping; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse; @@ -26,6 +19,12 @@ import org.hamcrest.Matchers; import org.junit.Before; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -46,8 +45,7 @@ public void setupMocks() { store = mock(NativeRoleMappingStore.class); 
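// Illustrative sketch, not part of the patch: with the ThreadPool parameter removed,
// the unit tests in these hunks construct the actions without a mock(ThreadPool.class)
// argument. The wiring below mirrors the TransportPutRoleMappingActionTests setup
// shown nearby and uses the same variable names as that test.
NativeRoleMappingStore store = mock(NativeRoleMappingStore.class);
TransportService transportService = new TransportService(Settings.EMPTY, null, null,
        TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
TransportPutRoleMappingAction action =
        new TransportPutRoleMappingAction(Settings.EMPTY, mock(ActionFilters.class), transportService, store);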
TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - action = new TransportGetRoleMappingsAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - transportService, store); + action = new TransportGetRoleMappingsAction(Settings.EMPTY, mock(ActionFilters.class), transportService, store); namesRef = new AtomicReference<>(null); result = Collections.emptyList(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 3ba584440bb42..b105c0d5d0ea5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -5,25 +5,24 @@ */ package org.elasticsearch.xpack.security.action.rolemapping; -import java.util.Arrays; -import java.util.Collections; -import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.junit.Before; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; + import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -42,8 +41,7 @@ public void setupMocks() { store = mock(NativeRoleMappingStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - action = new TransportPutRoleMappingAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), transportService, store); + action = new TransportPutRoleMappingAction(Settings.EMPTY, mock(ActionFilters.class), transportService, store); requestRef = new AtomicReference<>(null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index b46d307866284..b9232903f52ff 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -168,8 +168,7 @@ void doExecute(Action action, Request request, ActionListener null, null, Collections.emptySet()); final Realms realms = mock(Realms.class); - action = new TransportSamlInvalidateSessionAction(settings, threadPool, transportService, - mock(ActionFilters.class),tokenService, realms); + action = new TransportSamlInvalidateSessionAction(settings, transportService, mock(ActionFilters.class),tokenService, realms); final Path metadata = PathUtils.get(SamlRealm.class.getResource("idp1.xml").toURI()); final Environment env = TestEnvironment.newEnvironment(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 79d4978cfd248..6d177d89021ab 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -183,7 +183,7 @@ public void setup() throws Exception { final TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); final Realms realms = mock(Realms.class); - action = new TransportSamlLogoutAction(settings, threadPool, transportService, mock(ActionFilters.class), realms, tokenService); + action = new TransportSamlLogoutAction(settings, transportService, mock(ActionFilters.class), realms, tokenService); final Path metadata = PathUtils.get(SamlRealm.class.getResource("idp1.xml").toURI()); final Environment env = TestEnvironment.newEnvironment(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java index 20af681f477ec..66e2192eee5dd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest; @@ -39,7 +38,7 @@ public void testInternalUser() { when(securityContext.getUser()).thenReturn(randomFrom(SystemUser.INSTANCE, XPackUser.INSTANCE)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, transportService, mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new 
AtomicReference<>(); @@ -65,7 +64,7 @@ public void testNullUser() { SecurityContext securityContext = mock(SecurityContext.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, transportService, mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); @@ -93,7 +92,7 @@ public void testValidUser() { when(securityContext.getUser()).thenReturn(user); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, transportService, mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java index bc1c42f66a55b..4aa68c24c8d71 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordResponse; @@ -51,7 +50,7 @@ public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(settings, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(settings, transportService, mock(ActionFilters.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); @@ -82,7 +81,7 @@ public void testInternalUsers() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService, mock(ActionFilters.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); @@ -124,7 +123,7 @@ public void testValidUser() { 
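// Illustrative sketch, not part of the patch: these password/user tests can exercise
// the action synchronously because the users store callback is stubbed to invoke the
// listener directly, so no ThreadPool is involved. The argument index and the
// listener's type parameter below are assumptions for illustration only.
doAnswer(invocation -> {
    @SuppressWarnings("unchecked")
    ActionListener<Void> listener = (ActionListener<Void>) invocation.getArguments()[1];
    listener.onResponse(null);
    return null;
}).when(usersStore).changePassword(eq(request), any(ActionListener.class));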
}).when(usersStore).changePassword(eq(request), any(ActionListener.class)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService, mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); @@ -165,7 +164,7 @@ public Void answer(InvocationOnMock invocation) { }).when(usersStore).changePassword(eq(request), any(ActionListener.class)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService, mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java index dab63fcc31336..9c61d0cde520c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; import org.elasticsearch.xpack.core.security.action.user.DeleteUserResponse; @@ -48,8 +47,7 @@ public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(settings, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportDeleteUserAction action = new TransportDeleteUserAction(settings, mock(ActionFilters.class), usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(new AnonymousUser(settings).principal()); @@ -77,8 +75,8 @@ public void testInternalUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), + usersStore, transportService); DeleteUserRequest request = new 
DeleteUserRequest(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -107,8 +105,8 @@ public void testReservedUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), + usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(reserved.principal()); @@ -137,7 +135,7 @@ public void testValidUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final boolean found = randomBoolean(); @@ -178,7 +176,7 @@ public void testException() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final DeleteUserRequest request = new DeleteUserRequest(user.principal()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index fdb37b2f5bd8c..070fe4e64317b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -91,7 +91,7 @@ public void testAnonymousUser() { new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, securityIndex, threadPool); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, reservedRealm); GetUsersRequest request = new GetUsersRequest(); @@ -126,7 +126,7 @@ public void testInternalUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, 
mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); @@ -169,7 +169,7 @@ public void testReservedUsersOnly() { final List names = reservedUsers.stream().map(User::principal).collect(Collectors.toList()); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, reservedRealm); logger.error("names {}", names); @@ -209,7 +209,7 @@ public void testGetAllUsers() { securityIndex, threadPool); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, reservedRealm); GetUsersRequest request = new GetUsersRequest(); @@ -256,7 +256,7 @@ public void testGetStoreOnlyUsers() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); @@ -304,7 +304,7 @@ public void testException() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java index 140508b51a1b0..5f9a3f5243160 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java @@ -58,8 +58,7 @@ public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(settings, 
mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportPutUserAction action = new TransportPutUserAction(settings, mock(ActionFilters.class), usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(anonymousUser.principal()); @@ -88,8 +87,7 @@ public void testSystemUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -129,7 +127,7 @@ public void testReservedUser() { final User reserved = randomFrom(userFuture.actionGet().toArray(new User[0])); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); PutUserRequest request = new PutUserRequest(); @@ -159,7 +157,7 @@ public void testValidUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final boolean isCreate = randomBoolean(); @@ -205,7 +203,7 @@ public void testException() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final PutUserRequest request = new PutUserRequest(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index 3cbb2f8a1bc93..b6a53ae95f098 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.session.Configuration; @@ -25,9 +24,9 @@ public class TransportSqlClearCursorAction extends HandledTransportAction) SqlClearCursorRequest::new); this.planExecutor = planExecutor; this.sqlLicenseChecker = sqlLicenseChecker; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 044683a29ad67..bc96b3de97307 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.proto.ColumnInfo; @@ -34,9 +33,9 @@ public class TransportSqlQueryAction extends HandledTransportAction) SqlQueryRequest::new); this.planExecutor = planExecutor; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java index 0df3b2ad1bb50..61772ce6bb1e9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.session.Configuration; @@ -24,11 +23,9 @@ public class TransportSqlTranslateAction extends HandledTransportAction) SqlTranslateRequest::new); this.planExecutor = planExecutor; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java index cdb1479eec5e3..3d493299d7036 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java @@ -24,9 +24,9 @@ public abstract class WatcherTransportAction request) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); this.licenseState = licenseState; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java index 6a31b0e5cb054..8c056d0dcb8be 100644 --- 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionSnapshot; @@ -54,10 +53,10 @@ public class TransportAckWatchAction extends WatcherTransportAction) DeleteWatchRequest::new); this.client = client; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java index 6ccc7518d8b4c..0cc9af6aafa7b 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java @@ -57,6 +57,7 @@ */ public class TransportExecuteWatchAction extends WatcherTransportAction { + private final ThreadPool threadPool; private final ExecutionService executionService; private final Clock clock; private final TriggerService triggerService; @@ -68,7 +69,8 @@ public TransportExecuteWatchAction(Settings settings, TransportService transport ActionFilters actionFilters, ExecutionService executionService, Clock clock, XPackLicenseState licenseState, WatchParser watchParser, Client client, TriggerService triggerService) { - super(settings, ExecuteWatchAction.NAME, transportService, threadPool, actionFilters, licenseState, ExecuteWatchRequest::new); + super(settings, ExecuteWatchAction.NAME, transportService, actionFilters, licenseState, ExecuteWatchRequest::new); + this.threadPool = threadPool; this.executionService = executionService; this.clock = clock; this.triggerService = triggerService; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java index 6891e3e6272b2..60118f2afdab9 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchAction; @@ -43,9 +42,9 @@ public class TransportGetWatchAction extends WatcherTransportAction { + private final ThreadPool threadPool; private final Clock clock; private final WatchParser parser; private final Client client; @@ -64,7 +65,8 @@ public class TransportPutWatchAction extends 
WatcherTransportAction Date: Thu, 21 Jun 2018 11:32:11 -0700 Subject: [PATCH 65/92] [DOCS] Add code snippet testing in more ML APIs (#31339) --- x-pack/docs/build.gradle | 22 ++++++++++++++----- x-pack/docs/en/rest-api/ml/forecast.asciidoc | 8 +++---- .../en/rest-api/ml/preview-datafeed.asciidoc | 22 ++++++++++--------- 3 files changed, 32 insertions(+), 20 deletions(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 6c0a4bfcac647..912c9965b4d9a 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -47,7 +47,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/watcher/trigger/schedule/yearly.asciidoc', 'en/watcher/troubleshooting.asciidoc', 'en/rest-api/ml/delete-snapshot.asciidoc', - 'en/rest-api/ml/forecast.asciidoc', 'en/rest-api/ml/get-bucket.asciidoc', 'en/rest-api/ml/get-job-stats.asciidoc', 'en/rest-api/ml/get-overall-buckets.asciidoc', @@ -56,7 +55,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/ml/get-influencer.asciidoc', 'en/rest-api/ml/get-snapshot.asciidoc', 'en/rest-api/ml/post-data.asciidoc', - 'en/rest-api/ml/preview-datafeed.asciidoc', 'en/rest-api/ml/revert-snapshot.asciidoc', 'en/rest-api/ml/update-snapshot.asciidoc', 'en/rest-api/watcher/stats.asciidoc', @@ -296,7 +294,9 @@ setups['farequote_index'] = ''' responsetime: type: float airline: - type: keyword + type: keyword + doc_count: + type: integer ''' setups['farequote_data'] = setups['farequote_index'] + ''' - do: @@ -306,11 +306,11 @@ setups['farequote_data'] = setups['farequote_index'] + ''' refresh: true body: | {"index": {"_id":"1"}} - {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000"} + {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000", "doc_count": 5} {"index": {"_id":"2"}} - {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000"} + {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000", "doc_count": 23} {"index": {"_id":"3"}} - {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000"} + {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000", "doc_count": 42} ''' setups['farequote_job'] = setups['farequote_data'] + ''' - do: @@ -332,6 +332,16 @@ setups['farequote_job'] = setups['farequote_data'] + ''' } } ''' +setups['farequote_datafeed'] = setups['farequote_job'] + ''' + - do: + xpack.ml.put_datafeed: + datafeed_id: "datafeed-farequote" + body: > + { + "job_id":"farequote", + "indexes":"farequote" + } +''' setups['server_metrics_index'] = ''' - do: indices.create: diff --git a/x-pack/docs/en/rest-api/ml/forecast.asciidoc b/x-pack/docs/en/rest-api/ml/forecast.asciidoc index 169debef7b6cb..99647ecae1b25 100644 --- a/x-pack/docs/en/rest-api/ml/forecast.asciidoc +++ b/x-pack/docs/en/rest-api/ml/forecast.asciidoc @@ -5,7 +5,7 @@ Forecast Jobs ++++ -Predict the future behavior of a time series by using historical behavior. +Predicts the future behavior of a time series by using its historical behavior. ==== Request @@ -62,7 +62,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_forecast } -------------------------------------------------- // CONSOLE -// TEST[skip:todo] +// TEST[skip:requires delay] When the forecast is created, you receive the following results: [source,js] @@ -72,7 +72,7 @@ When the forecast is created, you receive the following results: "forecast_id": "wkCWa2IB2lF8nSE_TzZo" } ---- +// NOTCONSOLE You can subsequently see the forecast in the *Single Metric Viewer* in {kib}. 
-//and in the results that you retrieve by using {ml} APIs such as the -//<> and <>. + diff --git a/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc b/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc index e6b51f8ef069f..637b506cb9af7 100644 --- a/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc +++ b/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc @@ -31,7 +31,6 @@ structure of the data that will be passed to the anomaly detection engine. You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. -//<>. ==== Security Integration @@ -54,27 +53,30 @@ The following example obtains a preview of the `datafeed-farequote` {dfeed}: GET _xpack/ml/datafeeds/datafeed-farequote/_preview -------------------------------------------------- // CONSOLE -// TEST[skip:todo] +// TEST[setup:farequote_datafeed] The data that is returned for this example is as follows: [source,js] ---- [ { - "@timestamp": 1454803200000, - "airline": "AAL", - "responsetime": 132.20460510253906 - }, - { - "@timestamp": 1454803200000, + "time": 1454803200000, "airline": "JZA", + "doc_count": 5, "responsetime": 990.4628295898438 }, { - "@timestamp": 1454803200000, + "time": 1454803200000, "airline": "JBU", + "doc_count": 23, "responsetime": 877.5927124023438 }, - ... + { + "time": 1454803200000, + "airline": "KLM", + "doc_count": 42, + "responsetime": 1355.481201171875 + } ] ---- +// TESTRESPONSE From 99f503e3bea5c289907ff16b438875ecf2a8f2a2 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Fri, 22 Jun 2018 00:16:03 +0300 Subject: [PATCH 66/92] [DOCS] Fix REST tests in SQL docs Fixed a search & replace gone awry Tweaked the docs a bit --- x-pack/docs/en/sql/index.asciidoc | 6 +++--- x-pack/docs/en/sql/language/syntax/select.asciidoc | 12 ++++++------ x-pack/docs/en/sql/overview.asciidoc | 2 ++ 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/x-pack/docs/en/sql/index.asciidoc b/x-pack/docs/en/sql/index.asciidoc index 4c2130208927a..f96b83db08ad6 100644 --- a/x-pack/docs/en/sql/index.asciidoc +++ b/x-pack/docs/en/sql/index.asciidoc @@ -35,11 +35,11 @@ indices and return results in tabular format. SQL and print tabular results. <>:: A JDBC driver for {es}. -<>:: - List of functions and operators supported. <>:: Overview of the {es-sql} language, such as supported data types, commands and syntax. +<>:: + List of functions and operators supported. -- include::overview.asciidoc[] @@ -47,8 +47,8 @@ include::getting-started.asciidoc[] include::concepts.asciidoc[] include::security.asciidoc[] include::endpoints/index.asciidoc[] -include::functions/index.asciidoc[] include::language/index.asciidoc[] +include::functions/index.asciidoc[] include::appendix/index.asciidoc[] :jdbc-tests!: diff --git a/x-pack/docs/en/sql/language/syntax/select.asciidoc b/x-pack/docs/en/sql/language/syntax/select.asciidoc index f39cbc0c2f8ca..35c2bf0737db9 100644 --- a/x-pack/docs/en/sql/language/syntax/select.asciidoc +++ b/x-pack/docs/en/sql/language/syntax/select.asciidoc @@ -191,14 +191,14 @@ which results in something like: [source,text] -------------------------------------------------- author | name | page_count | release_date ------------------`--------------------`---------------`------------------------ +-----------------+--------------------+---------------+------------------------ Peter F. 
Hamilton|Pandora's Star |768 |2004-03-02T00:00:00.000Z Vernor Vinge |A Fire Upon the Deep|613 |1992-06-01T00:00:00.000Z Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z Alastair Reynolds|Revelation Space |585 |2000-03-15T00:00:00.000Z James S.A. Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z -------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\`/\\`/] +// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/] // TESTRESPONSE[_cat] [[sql-syntax-order-by-score]] @@ -228,13 +228,13 @@ Which results in something like: [source,text] -------------------------------------------------- SCORE() | author | name | page_count | release_date ----------------`---------------`-------------------`---------------`------------------------ +---------------+---------------+-------------------+---------------+------------------------ 2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z 1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z 1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z 1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z -------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\`/\\`/ s/\(/\\\(/ s/\)/\\\)/] +// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] // TESTRESPONSE[_cat] Note that you can return `SCORE()` by adding it to the where clause. This @@ -253,13 +253,13 @@ POST /_xpack/sql?format=txt [source,text] -------------------------------------------------- SCORE() | author | name | page_count | release_date ----------------`---------------`-------------------`---------------`------------------------ +---------------+---------------+-------------------+---------------+------------------------ 2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z 1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z 1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z 1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z -------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\`/\\`/ s/\(/\\\(/ s/\)/\\\)/] +// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] // TESTRESPONSE[_cat] NOTE: diff --git a/x-pack/docs/en/sql/overview.asciidoc b/x-pack/docs/en/sql/overview.asciidoc index 34d0dfb538352..36eff69f6263d 100644 --- a/x-pack/docs/en/sql/overview.asciidoc +++ b/x-pack/docs/en/sql/overview.asciidoc @@ -4,6 +4,7 @@ {es-sql} aims to provide a powerful yet lightweight SQL interface to {es}. [[sql-introduction]] +[float] === Introduction {es-sql} is an X-Pack component that allows SQL-like queries to be executed in real-time against {es}. @@ -12,6 +13,7 @@ _natively_ inside {es}. One can think of {es-sql} as a _translator_, one that understands both SQL and {es} and makes it easy to read and process data in real-time, at scale by leveraging {es} capabilities. [[sql-why]] +[float] === Why {es-sql} ? Native integration:: From 3b7225e9d1dead1718132a51fdde5d1cbe4a42b9 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 21 Jun 2018 16:53:20 -0700 Subject: [PATCH 67/92] In NumberFieldType equals and hashCode, make sure that NumberType is taken into account. 
(#31514) --- .../index/mapper/NumberFieldMapper.java | 18 ++++++++++++++++-- .../index/mapper/NumberFieldTypeTests.java | 15 +++++++++++++-- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 9c327c5294efe..b4531f9c489e3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -846,7 +846,7 @@ private static double objectToDouble(Object value) { public static final class NumberFieldType extends SimpleMappedFieldType { - NumberType type; + private final NumberType type; public NumberFieldType(NumberType type) { super(); @@ -856,7 +856,7 @@ public NumberFieldType(NumberType type) { setOmitNorms(true); } - NumberFieldType(NumberFieldType other) { + private NumberFieldType(NumberFieldType other) { super(other); this.type = other.type; } @@ -936,6 +936,20 @@ public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) { return new DocValueFormat.Decimal(format); } } + + @Override + public boolean equals(Object o) { + if (super.equals(o) == false) { + return false; + } + NumberFieldType that = (NumberFieldType) o; + return type == that.type; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), type); + } } private Explicit ignoreMalformed; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index 3ffe48fe70af6..4b2967553e57b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.lucene.document.Document; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.HalfFloatPoint; @@ -37,10 +36,11 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.TestUtil; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; import org.hamcrest.Matchers; import org.junit.Before; @@ -68,6 +68,17 @@ protected MappedFieldType createDefaultFieldType() { return new NumberFieldMapper.NumberFieldType(type); } + public void testEqualsWithDifferentNumberTypes() { + NumberType type = randomFrom(NumberType.values()); + NumberFieldType fieldType = new NumberFieldType(type); + + NumberType otherType = randomValueOtherThan(type, + () -> randomFrom(NumberType.values())); + NumberFieldType otherFieldType = new NumberFieldType(otherType); + + assertNotEquals(fieldType, otherFieldType); + } + public void testIsFieldWithinQuery() throws IOException { MappedFieldType ft = createDefaultFieldType(); // current impl ignores args and should always return INTERSECTS From 724438a0b084a9a9d20790ce141bf8ec18265950 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Fri, 22 Jun 2018 10:15:38 +1000 Subject: [PATCH 68/92] 
[Security] Check auth scheme case insensitively (#31490) According to RFC 7617, the Basic authentication scheme name should not be case sensitive. Case insensitive comparisons are also applicable for the bearer tokens where Bearer authentication scheme is used as per RFC 6750 and RFC 7235 Some Http clients may send authentication scheme names in different case types for eg. Basic, basic, BASIC, BEARER etc., so the lack of case-insensitive check is an issue when these clients try to authenticate with elasticsearch. This commit adds case-insensitive checks for Basic and Bearer authentication schemes. Closes #31486 --- .../authc/support/UsernamePasswordToken.java | 13 ++++++++----- .../xpack/security/authc/TokenService.java | 2 +- .../security/authc/TokenServiceTests.java | 18 +++++++++++++++++- .../support/UsernamePasswordTokenTests.java | 7 ++++--- 4 files changed, 30 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java index 4fdf32608dd6a..d8e58c29d237b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.security.authc.support; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; @@ -20,6 +21,8 @@ public class UsernamePasswordToken implements AuthenticationToken { public static final String BASIC_AUTH_PREFIX = "Basic "; public static final String BASIC_AUTH_HEADER = "Authorization"; + // authorization scheme check is case-insensitive + private static final boolean IGNORE_CASE_AUTH_HEADER_MATCH = true; private final String username; private final SecureString password; @@ -79,15 +82,15 @@ public int hashCode() { public static UsernamePasswordToken extractToken(ThreadContext context) { String authStr = context.getHeader(BASIC_AUTH_HEADER); - if (authStr == null) { - return null; - } - return extractToken(authStr); } private static UsernamePasswordToken extractToken(String headerValue) { - if (headerValue.startsWith(BASIC_AUTH_PREFIX) == false) { + if (Strings.isNullOrEmpty(headerValue)) { + return null; + } + if (headerValue.regionMatches(IGNORE_CASE_AUTH_HEADER_MATCH, 0, BASIC_AUTH_PREFIX, 0, + BASIC_AUTH_PREFIX.length()) == false) { // the header does not start with 'Basic ' so we cannot use it, but it may be valid for another realm return null; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 2934fb8062de4..8b6dd8295d399 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -1007,7 +1007,7 @@ private void maybeStartTokenRemover() { */ private String getFromHeader(ThreadContext threadContext) { String header = threadContext.getHeader("Authorization"); - if (Strings.hasLength(header) && header.startsWith("Bearer ") + if (Strings.hasText(header) && 
header.regionMatches(true, 0, "Bearer ", 0, "Bearer ".length()) && header.length() > "Bearer ".length()) { return header.substring("Bearer ".length()); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 28cf4bf95c924..d5e67f3996a7b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -71,6 +71,7 @@ import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; @@ -162,7 +163,7 @@ public void testAttachAndGetToken() throws Exception { mockGetTokenFromId(token); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token)); + requestContext.putHeader("Authorization", randomFrom("Bearer ", "BEARER ", "bearer ") + tokenService.getUserTokenString(token)); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); @@ -183,6 +184,21 @@ public void testAttachAndGetToken() throws Exception { } } + public void testInvalidAuthorizationHeader() throws Exception { + TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); + ThreadContext requestContext = new ThreadContext(Settings.EMPTY); + String token = randomFrom("", " "); + String authScheme = randomFrom("Bearer ", "BEARER ", "bearer ", "Basic "); + requestContext.putHeader("Authorization", authScheme + token); + + try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { + PlainActionFuture future = new PlainActionFuture<>(); + tokenService.getAndValidateToken(requestContext, future); + UserToken serialized = future.get(); + assertThat(serialized, nullValue()); + } + } + public void testRotateKey() throws Exception { TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java index 57c452798844c..86b9635851bb6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java @@ -45,7 +45,8 @@ public void testPutToken() throws Exception { public void testExtractToken() throws Exception { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - String header = "Basic " + Base64.getEncoder().encodeToString("user1:test123".getBytes(StandardCharsets.UTF_8)); + final String header = randomFrom("Basic ", "basic ", "BASIC ") + + 
Base64.getEncoder().encodeToString("user1:test123".getBytes(StandardCharsets.UTF_8)); threadContext.putHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, header); UsernamePasswordToken token = UsernamePasswordToken.extractToken(threadContext); assertThat(token, notNullValue()); @@ -54,7 +55,7 @@ public void testExtractToken() throws Exception { } public void testExtractTokenInvalid() throws Exception { - String[] invalidValues = { "Basic ", "Basic f" }; + final String[] invalidValues = { "Basic ", "Basic f", "basic " }; for (String value : invalidValues) { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); threadContext.putHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, value); @@ -70,7 +71,7 @@ public void testExtractTokenInvalid() throws Exception { public void testHeaderNotMatchingReturnsNull() { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - String header = randomFrom("BasicBroken", "invalid", "Basic"); + final String header = randomFrom("Basic", "BasicBroken", "invalid", " basic "); threadContext.putHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, header); UsernamePasswordToken extracted = UsernamePasswordToken.extractToken(threadContext); assertThat(extracted, nullValue()); From 009ae48cbae967d102b28d8b8e0bf25197c90421 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Fri, 22 Jun 2018 17:47:20 +1000 Subject: [PATCH 69/92] [PkiRealm] Invalidate cache on role mappings change (#31510) PkiRealm caches successful authentications and provides ways to invalidate the cache. But in some scenario's the cache was not being invalidated on role mapping change. PkiRealm does not inform role mapper to be notified for cache refresh on role mapping updates. The logic in `TransportClearRealmCacheAction#nodeOperation` which gets invoked for refreshing cache on realms, considers null or empty realm names in the request as clear cache on all realms. When LDAP realm is not present then it clears cache for all realms so it works fine, but when LDAP realm is configured then role mapper sends a request with LDAP realm names and so the cache is cleared only for those realms. This commit resolves the issue by registering PkiRealm with role mapper for cache refresh. PkiRealm implements CachingRealm and as it does not extend CachingUsernamePasswordRealm, have modified the interface method `refreshRealmOnChange` to accept CachingRealm. 
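For context, here is a minimal, self-contained sketch (plain Java, not the actual X-Pack classes) of the registration pattern this fix relies on: the role mapper records the name of every realm that asks to be notified, and the PKI realm now registers itself in its constructor so that a role-mapping change also clears its authentication cache. All class and method names below are illustrative stand-ins, not Elasticsearch APIs.

[source,java]
----
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative stand-in for CachingRealm: something with a name and a clearable cache.
interface SketchCachingRealm {
    String name();
    void expireAll();
}

// Illustrative stand-in for the role-mapper side (UserRoleMapper / NativeRoleMappingStore).
class SketchRoleMapper {
    private final Set<String> realmsToRefresh = new HashSet<>();

    void refreshRealmOnChange(SketchCachingRealm realm) {
        // Only the realm name is recorded; a clear-realm-cache request is later sent for these names.
        realmsToRefresh.add(realm.name());
    }

    Set<String> realmsToRefresh() {
        return realmsToRefresh;
    }
}

// Illustrative stand-in for PkiRealm after this change.
class SketchPkiRealm implements SketchCachingRealm {
    private final Map<String, Object> cache = new ConcurrentHashMap<>();

    SketchPkiRealm(SketchRoleMapper roleMapper) {
        // The essential addition in this patch: register for cache refresh on role mapping changes.
        roleMapper.refreshRealmOnChange(this);
    }

    @Override
    public String name() {
        return "pki1";
    }

    @Override
    public void expireAll() {
        cache.clear();
    }
}

public class RoleMappingRefreshSketch {
    public static void main(String[] args) {
        SketchRoleMapper mapper = new SketchRoleMapper();
        SketchCachingRealm realm = new SketchPkiRealm(mapper);
        // A role-mapping update would now clear the cache of every registered realm.
        System.out.println("realms to refresh: " + mapper.realmsToRefresh());
        realm.expireAll();
    }
}
----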
--- .../org/elasticsearch/xpack/security/authc/pki/PkiRealm.java | 1 + .../xpack/security/authc/support/CachingRealm.java | 5 +++++ .../xpack/security/authc/support/DnRoleMapper.java | 2 +- .../xpack/security/authc/support/UserRoleMapper.java | 2 +- .../security/authc/support/mapper/CompositeRoleMapper.java | 4 ++-- .../authc/support/mapper/NativeRoleMappingStore.java | 4 ++-- .../xpack/security/authc/pki/PkiRealmTests.java | 3 +++ 7 files changed, 15 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java index e65ac29aafe95..7b9eabfd7066f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java @@ -86,6 +86,7 @@ public PkiRealm(RealmConfig config, ResourceWatcherService watcherService, Nativ this.trustManager = trustManagers(config); this.principalPattern = PkiRealmSettings.USERNAME_PATTERN_SETTING.get(config.settings()); this.roleMapper = roleMapper; + this.roleMapper.refreshRealmOnChange(this); this.cache = CacheBuilder.builder() .setExpireAfterWrite(PkiRealmSettings.CACHE_TTL_SETTING.get(config.settings())) .setMaximumWeight(PkiRealmSettings.CACHE_MAX_USERS_SETTING.get(config.settings())) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java index 4c18ac2df6d6e..6089c8f9a70fb 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java @@ -13,6 +13,11 @@ */ public interface CachingRealm { + /** + * @return The name of this realm. 
+ */ + String name(); + /** * Expires a single user from the cache identified by the String agument * @param username the identifier of the user to be cleared diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java index 6516b02f68d0b..9ff4cd9be824b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java @@ -69,7 +69,7 @@ public DnRoleMapper(RealmConfig config, ResourceWatcherService watcherService) { } @Override - public void refreshRealmOnChange(CachingUsernamePasswordRealm realm) { + public void refreshRealmOnChange(CachingRealm realm) { addListener(realm::expireAll); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java index ffdab15e3b507..8c60e565e681a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java @@ -44,7 +44,7 @@ public interface UserRoleMapper { * the whole cluster depending on whether this role-mapper has node-local data or cluster-wide * data. */ - void refreshRealmOnChange(CachingUsernamePasswordRealm realm); + void refreshRealmOnChange(CachingRealm realm); /** * A representation of a user for whom roles should be mapped. diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java index 0814469cfcea7..956060a65789c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java @@ -16,7 +16,7 @@ import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.security.authc.RealmConfig; -import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; +import org.elasticsearch.xpack.security.authc.support.CachingRealm; import org.elasticsearch.xpack.security.authc.support.DnRoleMapper; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; @@ -48,7 +48,7 @@ public void resolveRoles(UserData user, ActionListener> listener) { } @Override - public void refreshRealmOnChange(CachingUsernamePasswordRealm realm) { + public void refreshRealmOnChange(CachingRealm realm) { this.delegates.forEach(mapper -> mapper.refreshRealmOnChange(realm)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 7df4114863de2..677d13082ca90 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -34,7 +34,7 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel; import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; +import org.elasticsearch.xpack.security.authc.support.CachingRealm; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -369,7 +369,7 @@ public void resolveRoles(UserData user, ActionListener> listener) { * @see ClearRealmCacheAction */ @Override - public void refreshRealmOnChange(CachingUsernamePasswordRealm realm) { + public void refreshRealmOnChange(CachingRealm realm) { realmsToRefresh.add(realm.name()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java index 74f6598f8dd1c..44d5859d12b67 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java @@ -50,6 +50,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class PkiRealmTests extends ESTestCase { @@ -104,6 +105,7 @@ private void assertSuccessfulAuthentication(Set roles) throws Exception UserRoleMapper roleMapper = mock(UserRoleMapper.class); PkiRealm realm = new PkiRealm(new RealmConfig("", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)), roleMapper); + verify(roleMapper).refreshRealmOnChange(realm); Mockito.doAnswer(invocation -> { final UserRoleMapper.UserData userData = (UserRoleMapper.UserData) invocation.getArguments()[0]; final ActionListener> listener = (ActionListener>) invocation.getArguments()[1]; @@ -144,6 +146,7 @@ private void assertSuccessfulAuthentication(Set roles) throws Exception final int numTimes = invalidate ? 2 : 1; verify(roleMapper, times(numTimes)).resolveRoles(any(UserRoleMapper.UserData.class), any(ActionListener.class)); + verifyNoMoreInteractions(roleMapper); } public void testCustomUsernamePattern() throws Exception { From 0352d88621c1d3794197105f7fef97e669c40179 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 22 Jun 2018 09:57:32 +0200 Subject: [PATCH 70/92] Get Mapping API to honour allow_no_indices and ignore_unavailable (#31507) Get Mapping currently throws index not found exception (and returns 404 status code) from the REST layer whenever an index was specified and no indices have been returned. We should not have this logic in the REST layer though as only our index resolver should decide whether we need to throw exceptions or not based on provided indices and corresponding indices options. 
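As an illustration of the semantics this change restores, the following standalone Java sketch models how `ignore_unavailable` and `allow_no_indices` interact when a request names a missing index, mirroring the YAML tests added below. It is a simplified model of the intended behaviour, not the actual IndexNameExpressionResolver code, and the class and method names are made up for the example.

[source,java]
----
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;

/**
 * Simplified model of the resolver decision: the REST layer no longer decides,
 * the indices options do. Names here are illustrative, not Elasticsearch APIs.
 */
public class IndicesOptionsSketch {

    static List<String> resolve(Set<String> existing, List<String> requested,
                                boolean ignoreUnavailable, boolean allowNoIndices) {
        List<String> resolved = new ArrayList<>();
        for (String index : requested) {
            if (existing.contains(index)) {
                resolved.add(index);
            } else if (ignoreUnavailable == false) {
                // A named index that does not exist is an error unless ignore_unavailable=true.
                throw new IllegalArgumentException("no such index [" + index + "]");
            }
        }
        if (resolved.isEmpty() && allowNoIndices == false) {
            // Resolving to nothing at all is only acceptable when allow_no_indices=true.
            throw new IllegalArgumentException("no indices matched " + requested);
        }
        // May be empty: get mapping then returns an empty body instead of a 404.
        return resolved;
    }

    public static void main(String[] args) {
        Set<String> existing = Collections.singleton("logs-1");
        // ignore_unavailable=true, allow_no_indices=true (the default): empty result, i.e. "{}"
        System.out.println(resolve(existing, Arrays.asList("test_index"), true, true));
        // ignore_unavailable=true, allow_no_indices=false: rejected (surfaced as 404 in the REST tests)
        try {
            resolve(existing, Arrays.asList("test_index"), true, false);
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
----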
Closes #31485 --- .../indices.get_mapping/30_missing_index.yml | 21 +++++++++++++++++++ .../50_wildcard_expansion.yml | 16 +++++++++++++- .../admin/indices/RestGetMappingAction.java | 12 +++-------- 3 files changed, 39 insertions(+), 10 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml index cf4e5b56e786e..4d3abb292f467 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml @@ -13,3 +13,24 @@ indices.get_mapping: index: test_index +--- +"Index missing, ignore_unavailable=true": + - skip: + version: " - 6.99.99" + reason: ignore_unavailable was ignored in previous versions + - do: + indices.get_mapping: + index: test_index + ignore_unavailable: true + + - match: { '': {} } + +--- +"Index missing, ignore_unavailable=true, allow_no_indices=false": + - do: + catch: missing + indices.get_mapping: + index: test_index + ignore_unavailable: true + allow_no_indices: false + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml index a0552f395edb5..d1a95b2690745 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml @@ -94,12 +94,26 @@ setup: --- "Get test-* with wildcard_expansion=none": + - skip: + version: " - 6.99.99" + reason: allow_no_indices (defaults to true) was ignored in previous versions - do: - catch: missing indices.get_mapping: index: test-x* expand_wildcards: none + - match: { '': {} } +--- +"Get test-* with wildcard_expansion=none allow_no_indices=false": + - skip: + version: " - 6.99.99" + reason: allow_no_indices was ignored in previous versions + - do: + catch: missing + indices.get_mapping: + index: test-x* + expand_wildcards: none + allow_no_indices: false --- "Get test-* with wildcard_expansion=open,closed": diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 08f8449b7017f..f5d99bbb46ca1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -89,14 +88,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC @Override public RestResponse buildResponse(final GetMappingsResponse response, final XContentBuilder builder) throws Exception { final ImmutableOpenMap> mappingsByIndex = response.getMappings(); - if (mappingsByIndex.isEmpty() && (indices.length != 0 || types.length != 0)) { - if (indices.length != 0 && types.length == 0) { - builder.close(); - return new 
BytesRestResponse(channel, new IndexNotFoundException(String.join(",", indices))); - } else { - builder.close(); - return new BytesRestResponse(channel, new TypeMissingException("_all", String.join(",", types))); - } + if (mappingsByIndex.isEmpty() && types.length != 0) { + builder.close(); + return new BytesRestResponse(channel, new TypeMissingException("_all", String.join(",", types))); } final Set typeNames = new HashSet<>(); From eade161894011cf19d47d14e4c3c1e15dd945227 Mon Sep 17 00:00:00 2001 From: Sohaib Iftikhar Date: Fri, 22 Jun 2018 09:59:04 +0200 Subject: [PATCH 71/92] REST high-level client: add simulate pipeline API (#31158) relates to #27205 --- .../elasticsearch/client/IngestClient.java | 35 +++++ .../client/RequestConverters.java | 15 ++ .../client/ESRestHighLevelClientTestCase.java | 10 +- .../elasticsearch/client/IngestClientIT.java | 99 +++++++++++++ .../client/RequestConvertersTests.java | 29 ++++ .../IngestClientDocumentationIT.java | 111 ++++++++++++++ .../ingest/simulate_pipeline.asciidoc | 90 ++++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../ingest/SimulateDocumentBaseResult.java | 36 +++++ .../ingest/SimulateDocumentVerboseResult.java | 23 ++- .../ingest/SimulatePipelineRequest.java | 10 +- .../ingest/SimulatePipelineResponse.java | 72 +++++++++ .../ingest/SimulateProcessorResult.java | 74 +++++++++- .../ingest/WriteableIngestDocument.java | 83 ++++++++++- .../elasticsearch/ingest/IngestDocument.java | 13 +- .../SimulateDocumentBaseResultTests.java | 138 ++++++++++++++++++ .../SimulateDocumentSimpleResultTests.java | 60 -------- .../SimulateDocumentVerboseResultTests.java | 113 ++++++++++++++ .../ingest/SimulatePipelineResponseTests.java | 135 ++++++++++++----- .../ingest/SimulateProcessorResultTests.java | 116 +++++++++++++-- .../ingest/WriteableIngestDocumentTests.java | 47 +++++- 21 files changed, 1182 insertions(+), 129 deletions(-) create mode 100644 docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc create mode 100644 server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java delete mode 100644 server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java create mode 100644 server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java index 5c5a82b52f438..340e14653971b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java @@ -24,6 +24,8 @@ import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.WritePipelineResponse; import java.io.IOException; @@ -125,4 +127,37 @@ public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions op restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::deletePipeline, options, WritePipelineResponse::fromXContent, listener, emptySet()); } + + /** + * Simulate a pipeline on a set of documents provided in the request + *
    + * See + * + * Simulate Pipeline API on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public SimulatePipelineResponse simulatePipeline(SimulatePipelineRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::simulatePipeline, options, + SimulatePipelineResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously simulate a pipeline on a set of documents provided in the request + *
    + * See + * + * Simulate Pipeline API on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void simulatePipelineAsync(SimulatePipelineRequest request, + RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::simulatePipeline, options, + SimulatePipelineResponse::fromXContent, listener, emptySet()); + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index cd67bc8e48325..3d5d275732a6c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -71,6 +71,7 @@ import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; @@ -886,6 +887,20 @@ static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws I return request; } + static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException { + EndpointBuilder builder = new EndpointBuilder().addPathPartAsIs("_ingest/pipeline"); + if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) { + builder.addPathPart(simulatePipelineRequest.getId()); + } + builder.addPathPartAsIs("_simulate"); + String endpoint = builder.build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + Params params = new Params(request); + params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose())); + request.setEntity(createEntity(simulatePipelineRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request getAlias(GetAliasesRequest getAliasesRequest) { String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); String[] aliases = getAliasesRequest.aliases() == null ? 
Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java index 4ad39f547584b..69fbab30c336c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java @@ -85,9 +85,7 @@ private HighLevelClient(RestClient restClient) { } } - protected static XContentBuilder buildRandomXContentPipeline() throws IOException { - XContentType xContentType = randomFrom(XContentType.values()); - XContentBuilder pipelineBuilder = XContentBuilder.builder(xContentType.xContent()); + protected static XContentBuilder buildRandomXContentPipeline(XContentBuilder pipelineBuilder) throws IOException { pipelineBuilder.startObject(); { pipelineBuilder.field(Pipeline.DESCRIPTION_KEY, "some random set of processors"); @@ -114,6 +112,12 @@ protected static XContentBuilder buildRandomXContentPipeline() throws IOExceptio return pipelineBuilder; } + protected static XContentBuilder buildRandomXContentPipeline() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + XContentBuilder pipelineBuilder = XContentBuilder.builder(xContentType.xContent()); + return buildRandomXContentPipeline(pipelineBuilder); + } + protected static void createPipeline(String pipelineId) throws IOException { XContentBuilder builder = buildRandomXContentPipeline(); createPipeline(new PutPipelineRequest(pipelineId, BytesReference.bytes(builder), builder.contentType())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java index ecc0d0052d415..6fd6f95059577 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java @@ -23,12 +23,22 @@ import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; +import org.elasticsearch.action.ingest.SimulateDocumentResult; +import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.ingest.PipelineConfiguration; import java.io.IOException; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.core.IsInstanceOf.instanceOf; public class IngestClientIT extends ESRestHighLevelClientTestCase { @@ -80,4 +90,93 @@ public void testDeletePipeline() throws IOException { execute(request, highLevelClient().ingest()::deletePipeline, highLevelClient().ingest()::deletePipelineAsync); assertTrue(response.isAcknowledged()); } + + public void testSimulatePipeline() throws IOException { + testSimulatePipeline(false, false); + } + + public void testSimulatePipelineWithFailure() throws 
IOException { + testSimulatePipeline(false, true); + } + + public void testSimulatePipelineVerbose() throws IOException { + testSimulatePipeline(true, false); + } + + public void testSimulatePipelineVerboseWithFailure() throws IOException { + testSimulatePipeline(true, true); + } + + private void testSimulatePipeline(boolean isVerbose, + boolean isFailure) throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + XContentBuilder builder = XContentBuilder.builder(xContentType.xContent()); + String rankValue = isFailure ? "non-int" : Integer.toString(1234); + builder.startObject(); + { + builder.field("pipeline"); + buildRandomXContentPipeline(builder); + builder.startArray("docs"); + { + builder.startObject() + .field("_index", "index") + .field("_type", "doc") + .field("_id", "doc_" + 1) + .startObject("_source").field("foo", "rab_" + 1).field("rank", rankValue).endObject() + .endObject(); + } + builder.endArray(); + } + builder.endObject(); + + SimulatePipelineRequest request = new SimulatePipelineRequest( + BytesReference.bytes(builder), + builder.contentType() + ); + request.setVerbose(isVerbose); + SimulatePipelineResponse response = + execute(request, highLevelClient().ingest()::simulatePipeline, highLevelClient().ingest()::simulatePipelineAsync); + List results = response.getResults(); + assertEquals(1, results.size()); + if (isVerbose) { + assertThat(results.get(0), instanceOf(SimulateDocumentVerboseResult.class)); + SimulateDocumentVerboseResult verboseResult = (SimulateDocumentVerboseResult) results.get(0); + assertEquals(2, verboseResult.getProcessorResults().size()); + if (isFailure) { + assertNotNull(verboseResult.getProcessorResults().get(1).getFailure()); + assertThat(verboseResult.getProcessorResults().get(1).getFailure().getMessage(), + containsString("unable to convert [non-int] to integer")); + } else { + assertEquals( + verboseResult.getProcessorResults().get(0).getIngestDocument() + .getFieldValue("foo", String.class), + "bar" + ); + assertEquals( + Integer.valueOf(1234), + verboseResult.getProcessorResults().get(1).getIngestDocument() + .getFieldValue("rank", Integer.class) + ); + } + } else { + assertThat(results.get(0), instanceOf(SimulateDocumentBaseResult.class)); + SimulateDocumentBaseResult baseResult = (SimulateDocumentBaseResult)results.get(0); + if (isFailure) { + assertNotNull(baseResult.getFailure()); + assertThat(baseResult.getFailure().getMessage(), + containsString("unable to convert [non-int] to integer")); + } else { + assertNotNull(baseResult.getIngestDocument()); + assertEquals( + baseResult.getIngestDocument().getFieldValue("foo", String.class), + "bar" + ); + assertEquals( + Integer.valueOf(1234), + baseResult.getIngestDocument() + .getFieldValue("rank", Integer.class) + ); + } + } + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index eee37cea561b0..e8bbbf6f5fd0c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -74,6 +74,7 @@ import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; import 
org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; @@ -1534,6 +1535,34 @@ public void testDeletePipeline() { assertEquals(expectedParams, expectedRequest.getParameters()); } + public void testSimulatePipeline() throws IOException { + String pipelineId = randomBoolean() ? "some_pipeline_id" : null; + boolean verbose = randomBoolean(); + String json = "{\"pipeline\":{" + + "\"description\":\"_description\"," + + "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]}," + + "\"docs\":[{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}]}"; + SimulatePipelineRequest request = new SimulatePipelineRequest( + new BytesArray(json.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON + ); + request.setId(pipelineId); + request.setVerbose(verbose); + Map expectedParams = new HashMap<>(); + expectedParams.put("verbose", Boolean.toString(verbose)); + + Request expectedRequest = RequestConverters.simulatePipeline(request); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + endpoint.add("_ingest/pipeline"); + if (pipelineId != null && !pipelineId.isEmpty()) + endpoint.add(pipelineId); + endpoint.add("_simulate"); + assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); + assertEquals(HttpPost.METHOD_NAME, expectedRequest.getMethod()); + assertEquals(expectedParams, expectedRequest.getParameters()); + assertToXContentBody(request, expectedRequest.getEntity()); + } + public void testClusterHealth() { ClusterHealthRequest healthRequest = new ClusterHealthRequest(); Map expectedParams = new HashMap<>(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java index f5bdc9f2f3ee5..c53ec2b5d7cc7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java @@ -25,6 +25,12 @@ import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; +import org.elasticsearch.action.ingest.SimulateDocumentResult; +import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; +import org.elasticsearch.action.ingest.SimulateProcessorResult; import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; @@ -277,4 +283,109 @@ public void onFailure(Exception e) { } } + public void testSimulatePipeline() throws IOException { + RestHighLevelClient client = highLevelClient(); + + { + // tag::simulate-pipeline-request + String source = + "{\"" + + "pipeline\":{" + + "\"description\":\"_description\"," + + "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]" + + "}," + + "\"docs\":[" + + "{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"bar\"}}," + + 
"{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}" + + "]" + + "}"; + SimulatePipelineRequest request = new SimulatePipelineRequest( + new BytesArray(source.getBytes(StandardCharsets.UTF_8)), // <1> + XContentType.JSON // <2> + ); + // end::simulate-pipeline-request + + // tag::simulate-pipeline-request-pipeline-id + request.setId("my-pipeline-id"); // <1> + // end::simulate-pipeline-request-pipeline-id + + // For testing we set this back to null + request.setId(null); + + // tag::simulate-pipeline-request-verbose + request.setVerbose(true); // <1> + // end::simulate-pipeline-request-verbose + + // tag::simulate-pipeline-execute + SimulatePipelineResponse response = client.ingest().simulatePipeline(request, RequestOptions.DEFAULT); // <1> + // end::simulate-pipeline-execute + + // tag::simulate-pipeline-response + for (SimulateDocumentResult result: response.getResults()) { // <1> + if (request.isVerbose()) { + assert result instanceof SimulateDocumentVerboseResult; + SimulateDocumentVerboseResult verboseResult = (SimulateDocumentVerboseResult)result; // <2> + for (SimulateProcessorResult processorResult: verboseResult.getProcessorResults()) { // <3> + processorResult.getIngestDocument(); // <4> + processorResult.getFailure(); // <5> + } + } else { + assert result instanceof SimulateDocumentBaseResult; + SimulateDocumentBaseResult baseResult = (SimulateDocumentBaseResult)result; // <6> + baseResult.getIngestDocument(); // <7> + baseResult.getFailure(); // <8> + } + } + // end::simulate-pipeline-response + assert(response.getResults().size() > 0); + } + } + + public void testSimulatePipelineAsync() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + String source = + "{\"" + + "pipeline\":{" + + "\"description\":\"_description\"," + + "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]" + + "}," + + "\"docs\":[" + + "{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"bar\"}}," + + "{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}" + + "]" + + "}"; + SimulatePipelineRequest request = new SimulatePipelineRequest( + new BytesArray(source.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON + ); + + // tag::simulate-pipeline-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(SimulatePipelineResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::simulate-pipeline-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::simulate-pipeline-execute-async + client.ingest().simulatePipelineAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::simulate-pipeline-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + } diff --git a/docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc b/docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc new file mode 100644 index 0000000000000..9d1bbd06ceb26 --- /dev/null +++ b/docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc @@ -0,0 +1,90 @@ +[[java-rest-high-ingest-simulate-pipeline]] +=== Simulate Pipeline API + +[[java-rest-high-ingest-simulate-pipeline-request]] +==== Simulate Pipeline Request + +A `SimulatePipelineRequest` requires a source and a `XContentType`. 
The source consists +of the request body. See the https://www.elastic.co/guide/en/elasticsearch/reference/master/simulate-pipeline-api.html[docs] +for more details on the request body. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-request] +-------------------------------------------------- +<1> The request body as a `ByteArray`. +<2> The XContentType for the request body supplied above. + +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-request-pipeline-id] +-------------------------------------------------- +<1> You can either specify an existing pipeline to execute against the provided documents, or supply a +pipeline definition in the body of the request. This option sets the id for an existing pipeline. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-request-verbose] +-------------------------------------------------- +<1> To see the intermediate results of each processor in the simulate request, you can add the verbose parameter +to the request. + +[[java-rest-high-ingest-simulate-pipeline-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-execute] +-------------------------------------------------- +<1> Execute the request and get back the response in a `SimulatePipelineResponse` object. + +[[java-rest-high-ingest-simulate-pipeline-async]] +==== Asynchronous Execution + +The asynchronous execution of a simulate pipeline request requires both the `SimulatePipelineRequest` +instance and an `ActionListener` instance to be passed to the asynchronous +method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-execute-async] +-------------------------------------------------- +<1> The `SimulatePipelineRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `SimulatePipelineResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. 
The raised exception is provided as an argument + +[[java-rest-high-ingest-simulate-pipeline-response]] +==== Simulate Pipeline Response + +The returned `SimulatePipelineResponse` allows to retrieve information about the executed + operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-response] +-------------------------------------------------- +<1> Get results for each of the documents provided as instance of `List`. +<2> If the request was in verbose mode cast the response to `SimulateDocumentVerboseResult`. +<3> Check the result after each processor is applied. +<4> Get the ingest document for the result obtained in 3. +<5> Or get the failure for the result obtained in 3. +<6> Get the result as `SimulateDocumentBaseResult` if the result was not verbose. +<7> Get the ingest document for the result obtained in 6. +<8> Or get the failure for the result obtained in 6. diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 727088aa5737f..418eb528f8e00 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -123,10 +123,12 @@ The Java High Level REST Client supports the following Ingest APIs: * <> * <> * <> +* <> include::ingest/put_pipeline.asciidoc[] include::ingest/get_pipeline.asciidoc[] include::ingest/delete_pipeline.asciidoc[] +include::ingest/simulate_pipeline.asciidoc[] == Snapshot APIs diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java index c6252feea276c..f7f76a2bbca7d 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java @@ -19,13 +19,18 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + /** * Holds the end result of what a pipeline did to sample document provided via the simulate api. 
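 * A result carries either the resulting ingest document or the failure raised while running the pipeline,
 * never both; the single-argument constructors leave the unused field set to null.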
*/ @@ -33,6 +38,33 @@ public final class SimulateDocumentBaseResult implements SimulateDocumentResult private final WriteableIngestDocument ingestDocument; private final Exception failure; + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_document_base_result", + true, + a -> { + if (a[1] == null) { + assert a[0] != null; + return new SimulateDocumentBaseResult(((WriteableIngestDocument)a[0]).getIngestDocument()); + } else { + assert a[0] == null; + return new SimulateDocumentBaseResult((ElasticsearchException)a[1]); + } + } + ); + static { + PARSER.declareObject( + optionalConstructorArg(), + WriteableIngestDocument.INGEST_DOC_PARSER, + new ParseField(WriteableIngestDocument.DOC_FIELD) + ); + PARSER.declareObject( + optionalConstructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField("error") + ); + } + public SimulateDocumentBaseResult(IngestDocument ingestDocument) { this.ingestDocument = new WriteableIngestDocument(ingestDocument); failure = null; @@ -89,4 +121,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static SimulateDocumentBaseResult fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java index 21e802981850c..099e238f2d25e 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java @@ -18,21 +18,38 @@ */ package org.elasticsearch.action.ingest; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + /** * Holds the result of what a pipeline did to a sample document via the simulate api, but instead of {@link SimulateDocumentBaseResult} * this result class holds the intermediate result each processor did to the sample document. 
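 * Each processor that ran contributes one {@link SimulateProcessorResult}, serialized under the
 * "processor_results" array.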
*/ public final class SimulateDocumentVerboseResult implements SimulateDocumentResult { + public static final String PROCESSOR_RESULT_FIELD = "processor_results"; private final List processorResults; + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_document_verbose_result", + true, + a -> new SimulateDocumentVerboseResult((List)a[0]) + ); + static { + PARSER.declareObjectArray(constructorArg(), SimulateProcessorResult.PARSER, new ParseField(PROCESSOR_RESULT_FIELD)); + } + public SimulateDocumentVerboseResult(List processorResults) { this.processorResults = processorResults; } @@ -63,7 +80,7 @@ public List getProcessorResults() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.startArray("processor_results"); + builder.startArray(PROCESSOR_RESULT_FIELD); for (SimulateProcessorResult processorResult : processorResults) { processorResult.toXContent(builder, params); } @@ -71,4 +88,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static SimulateDocumentVerboseResult fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index 3aa697b8e997c..9a7d6bb7feea9 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -25,6 +25,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -42,7 +44,7 @@ import static org.elasticsearch.ingest.IngestDocument.MetaData; -public class SimulatePipelineRequest extends ActionRequest { +public class SimulatePipelineRequest extends ActionRequest implements ToXContentObject { private String id; private boolean verbose; @@ -126,6 +128,12 @@ public void writeTo(StreamOutput out) throws IOException { } } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.rawValue(source.streamInput(), xContentType); + return builder; + } + public static final class Fields { static final String PIPELINE = "pipeline"; static final String DOCS = "docs"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java index e9ea1a7750738..991e81a14553b 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java @@ -19,22 +19,90 @@ package org.elasticsearch.action.ingest; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + public class SimulatePipelineResponse extends ActionResponse implements ToXContentObject { private String pipelineId; private boolean verbose; private List results; + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_pipeline_response", + true, + a -> { + List results = (List)a[0]; + boolean verbose = false; + if (results.size() > 0) { + if (results.get(0) instanceof SimulateDocumentVerboseResult) { + verbose = true; + } + } + return new SimulatePipelineResponse(null, verbose, results); + } + ); + static { + PARSER.declareObjectArray( + constructorArg(), + (parser, context) -> { + Token token = parser.currentToken(); + ensureExpectedToken(Token.START_OBJECT, token, parser::getTokenLocation); + SimulateDocumentResult result = null; + while ((token = parser.nextToken()) != Token.END_OBJECT) { + ensureExpectedToken(token, Token.FIELD_NAME, parser::getTokenLocation); + String fieldName = parser.currentName(); + token = parser.nextToken(); + if (token == Token.START_ARRAY) { + if (fieldName.equals(SimulateDocumentVerboseResult.PROCESSOR_RESULT_FIELD)) { + List results = new ArrayList<>(); + while ((token = parser.nextToken()) == Token.START_OBJECT) { + results.add(SimulateProcessorResult.fromXContent(parser)); + } + ensureExpectedToken(Token.END_ARRAY, token, parser::getTokenLocation); + result = new SimulateDocumentVerboseResult(results); + } else { + parser.skipChildren(); + } + } else if (token.equals(Token.START_OBJECT)) { + switch (fieldName) { + case WriteableIngestDocument.DOC_FIELD: + result = new SimulateDocumentBaseResult( + WriteableIngestDocument.INGEST_DOC_PARSER.apply(parser, null).getIngestDocument() + ); + break; + case "error": + result = new SimulateDocumentBaseResult(ElasticsearchException.fromXContent(parser)); + break; + default: + parser.skipChildren(); + break; + } + } // else it is a value skip it + } + assert result != null; + return result; + }, + new ParseField(Fields.DOCUMENTS)); + } + public SimulatePipelineResponse() { } @@ -98,6 +166,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public static SimulatePipelineResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + static final class Fields { static final String DOCUMENTS = "docs"; } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java index 386a00b391f3c..101ce7ec260e1 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java @@ -19,33 +19,91 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.ElasticsearchException; +import 
org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; -class SimulateProcessorResult implements Writeable, ToXContentObject { +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +public class SimulateProcessorResult implements Writeable, ToXContentObject { + + private static final String IGNORED_ERROR_FIELD = "ignored_error"; private final String processorTag; private final WriteableIngestDocument ingestDocument; private final Exception failure; - SimulateProcessorResult(String processorTag, IngestDocument ingestDocument, Exception failure) { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser IGNORED_ERROR_PARSER = + new ConstructingObjectParser<>( + "ignored_error_parser", + true, + a -> (ElasticsearchException)a[0] + ); + static { + IGNORED_ERROR_PARSER.declareObject( + constructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField("error") + ); + } + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_processor_result", + true, + a -> { + String processorTag = a[0] == null ? null : (String)a[0]; + IngestDocument document = a[1] == null ? null : ((WriteableIngestDocument)a[1]).getIngestDocument(); + Exception failure = null; + if (a[2] != null) { + failure = (ElasticsearchException)a[2]; + } else if (a[3] != null) { + failure = (ElasticsearchException)a[3]; + } + return new SimulateProcessorResult(processorTag, document, failure); + } + ); + static { + PARSER.declareString(optionalConstructorArg(), new ParseField(ConfigurationUtils.TAG_KEY)); + PARSER.declareObject( + optionalConstructorArg(), + WriteableIngestDocument.INGEST_DOC_PARSER, + new ParseField(WriteableIngestDocument.DOC_FIELD) + ); + PARSER.declareObject( + optionalConstructorArg(), + IGNORED_ERROR_PARSER, + new ParseField(IGNORED_ERROR_FIELD) + ); + PARSER.declareObject( + optionalConstructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField("error") + ); + } + + public SimulateProcessorResult(String processorTag, IngestDocument ingestDocument, Exception failure) { this.processorTag = processorTag; this.ingestDocument = (ingestDocument == null) ? 
null : new WriteableIngestDocument(ingestDocument); this.failure = failure; } - SimulateProcessorResult(String processorTag, IngestDocument ingestDocument) { + public SimulateProcessorResult(String processorTag, IngestDocument ingestDocument) { this(processorTag, ingestDocument, null); } - SimulateProcessorResult(String processorTag, Exception failure) { + public SimulateProcessorResult(String processorTag, Exception failure) { this(processorTag, null, failure); } @@ -98,7 +156,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } if (failure != null && ingestDocument != null) { - builder.startObject("ignored_error"); + builder.startObject(IGNORED_ERROR_FIELD); ElasticsearchException.generateFailureXContent(builder, params, failure, true); builder.endObject(); } else if (failure != null) { @@ -112,4 +170,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static SimulateProcessorResult fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 87168cb7a9bba..2430868bb5909 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -20,24 +20,91 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.IngestDocument.MetaData; import java.io.IOException; import java.time.ZoneId; +import java.time.ZonedDateTime; import java.util.Date; +import java.util.HashMap; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + final class WriteableIngestDocument implements Writeable, ToXContentFragment { + static final String SOURCE_FIELD = "_source"; + static final String INGEST_FIELD = "_ingest"; + static final String DOC_FIELD = "doc"; private final IngestDocument ingestDocument; + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser INGEST_DOC_PARSER = + new ConstructingObjectParser<>( + "ingest_document", + true, + a -> { + HashMap sourceAndMetadata = new HashMap<>(); + sourceAndMetadata.put(MetaData.INDEX.getFieldName(), a[0]); + sourceAndMetadata.put(MetaData.TYPE.getFieldName(), a[1]); + sourceAndMetadata.put(MetaData.ID.getFieldName(), a[2]); + if (a[3] != null) { + sourceAndMetadata.put(MetaData.ROUTING.getFieldName(), a[3]); + } + if (a[4] != null) { + sourceAndMetadata.put(MetaData.VERSION.getFieldName(), a[4]); + } + if (a[5] != null) { + sourceAndMetadata.put(MetaData.VERSION_TYPE.getFieldName(), a[5]); + } + 
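+                    // a[6] is the parsed "_source" map and a[7] the "_ingest" metadata map (see the declare* calls below)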
sourceAndMetadata.putAll((Map)a[6]); + return new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, (Map)a[7])); + } + ); + static { + INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(MetaData.INDEX.getFieldName())); + INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(MetaData.TYPE.getFieldName())); + INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(MetaData.ID.getFieldName())); + INGEST_DOC_PARSER.declareString(optionalConstructorArg(), new ParseField(MetaData.ROUTING.getFieldName())); + INGEST_DOC_PARSER.declareLong(optionalConstructorArg(), new ParseField(MetaData.VERSION.getFieldName())); + INGEST_DOC_PARSER.declareString(optionalConstructorArg(), new ParseField(MetaData.VERSION_TYPE.getFieldName())); + INGEST_DOC_PARSER.declareObject(constructorArg(), (p, c) -> p.map(), new ParseField(SOURCE_FIELD)); + INGEST_DOC_PARSER.declareObject( + constructorArg(), + (p, c) -> { + Map ingestMap = p.map(); + ingestMap.computeIfPresent( + "timestamp", + (k, o) -> ZonedDateTime.parse((String)o) + ); + return ingestMap; + }, + new ParseField(INGEST_FIELD) + ); + } + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "writeable_ingest_document", + true, + a -> (WriteableIngestDocument)a[0] + ); + static { + PARSER.declareObject(constructorArg(), INGEST_DOC_PARSER, new ParseField(DOC_FIELD)); + } + WriteableIngestDocument(IngestDocument ingestDocument) { assert ingestDocument != null; this.ingestDocument = ingestDocument; @@ -67,19 +134,25 @@ IngestDocument getIngestDocument() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject("doc"); - Map metadataMap = ingestDocument.extractMetadata(); + builder.startObject(DOC_FIELD); + Map metadataMap = ingestDocument.getMetadata(); for (Map.Entry metadata : metadataMap.entrySet()) { if (metadata.getValue() != null) { builder.field(metadata.getKey().getFieldName(), metadata.getValue().toString()); } } - builder.field("_source", ingestDocument.getSourceAndMetadata()); - builder.field("_ingest", ingestDocument.getIngestMetadata()); + Map source = IngestDocument.deepCopyMap(ingestDocument.getSourceAndMetadata()); + metadataMap.keySet().forEach(mD -> source.remove(mD.getFieldName())); + builder.field(SOURCE_FIELD, source); + builder.field(INGEST_FIELD, ingestDocument.getIngestMetadata()); builder.endObject(); return builder; } + public static WriteableIngestDocument fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + @Override public boolean equals(Object o) { if (this == o) { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index e31a97dc2c6ce..2bd842e72b107 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -570,6 +570,17 @@ public Map extractMetadata() { return metadataMap; } + /** + * Does the same thing as {@link #extractMetadata} but does not mutate the map. + */ + public Map getMetadata() { + Map metadataMap = new EnumMap<>(MetaData.class); + for (MetaData metaData : MetaData.values()) { + metadataMap.put(metaData, sourceAndMetadata.get(metaData.getFieldName())); + } + return metadataMap; + } + /** * Returns the available ingest metadata fields, by default only timestamp, but it is possible to set additional ones. 
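     * The simulate pipeline API serializes this map as the "_ingest" object of each simulated document.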
* Use only for reading values, modify them instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} @@ -588,7 +599,7 @@ public Map getSourceAndMetadata() { } @SuppressWarnings("unchecked") - private static Map deepCopyMap(Map source) { + public static Map deepCopyMap(Map source) { return (Map) deepCopy(source); } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java new file mode 100644 index 0000000000000..bfa6c1eb9b8c3 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.ingest; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; + +import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.elasticsearch.action.ingest.WriteableIngestDocumentTests.createRandomIngestDoc; + +public class SimulateDocumentBaseResultTests extends AbstractXContentTestCase { + + public void testSerialization() throws IOException { + boolean isFailure = randomBoolean(); + SimulateDocumentBaseResult simulateDocumentBaseResult = createTestInstance(isFailure); + + BytesStreamOutput out = new BytesStreamOutput(); + simulateDocumentBaseResult.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + SimulateDocumentBaseResult otherSimulateDocumentBaseResult = new SimulateDocumentBaseResult(streamInput); + + if (isFailure) { + assertThat(otherSimulateDocumentBaseResult.getIngestDocument(), equalTo(simulateDocumentBaseResult.getIngestDocument())); + assertThat(otherSimulateDocumentBaseResult.getFailure(), instanceOf(IllegalArgumentException.class)); + IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentBaseResult.getFailure(); + assertThat(e.getMessage(), equalTo("test")); + } else { + assertIngestDocument(otherSimulateDocumentBaseResult.getIngestDocument(), simulateDocumentBaseResult.getIngestDocument()); + } + } + + static SimulateDocumentBaseResult createTestInstance(boolean isFailure) { + SimulateDocumentBaseResult simulateDocumentBaseResult; + if 
(isFailure) { + simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); + } else { + IngestDocument ingestDocument = createRandomIngestDoc(); + simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); + } + return simulateDocumentBaseResult; + } + + private static SimulateDocumentBaseResult createTestInstanceWithFailures() { + return createTestInstance(randomBoolean()); + } + + @Override + protected SimulateDocumentBaseResult createTestInstance() { + return createTestInstance(false); + } + + @Override + protected SimulateDocumentBaseResult doParseInstance(XContentParser parser) { + return SimulateDocumentBaseResult.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + public static void assertEqualDocs(SimulateDocumentBaseResult response, SimulateDocumentBaseResult parsedResponse) { + assertEquals(response.getIngestDocument(), parsedResponse.getIngestDocument()); + if (response.getFailure() != null) { + assertNotNull(parsedResponse.getFailure()); + assertThat( + parsedResponse.getFailure().getMessage(), + containsString(response.getFailure().getMessage()) + ); + } else { + assertNull(parsedResponse.getFailure()); + } + } + + @Override + public void assertEqualInstances(SimulateDocumentBaseResult response, SimulateDocumentBaseResult parsedResponse) { + assertEqualDocs(response, parsedResponse); + } + + /** + * Test parsing {@link SimulateDocumentBaseResult} with inner failures as they don't support asserting on xcontent + * equivalence, given that exceptions are not parsed back as the same original class. We run the usual + * {@link AbstractXContentTestCase#testFromXContent()} without failures, and this other test with failures where + * we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulateDocumentBaseResultTests::createTestInstanceWithFailures; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields(), + getShuffleFieldsExceptions(), getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java deleted file mode 100644 index 83aad26f6a07b..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; - -import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.instanceOf; - -public class SimulateDocumentSimpleResultTests extends ESTestCase { - - public void testSerialization() throws IOException { - boolean isFailure = randomBoolean(); - SimulateDocumentBaseResult simulateDocumentBaseResult; - if (isFailure) { - simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); - } else { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); - } - - BytesStreamOutput out = new BytesStreamOutput(); - simulateDocumentBaseResult.writeTo(out); - StreamInput streamInput = out.bytes().streamInput(); - SimulateDocumentBaseResult otherSimulateDocumentBaseResult = new SimulateDocumentBaseResult(streamInput); - - if (isFailure) { - assertThat(otherSimulateDocumentBaseResult.getIngestDocument(), equalTo(simulateDocumentBaseResult.getIngestDocument())); - assertThat(otherSimulateDocumentBaseResult.getFailure(), instanceOf(IllegalArgumentException.class)); - IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentBaseResult.getFailure(); - assertThat(e.getMessage(), equalTo("test")); - } else { - assertIngestDocument(otherSimulateDocumentBaseResult.getIngestDocument(), simulateDocumentBaseResult.getIngestDocument()); - } - } -} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java new file mode 100644 index 0000000000000..5701bcc27800f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.ingest; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public class SimulateDocumentVerboseResultTests extends AbstractXContentTestCase { + + static SimulateDocumentVerboseResult createTestInstance(boolean withFailures) { + int numDocs = randomIntBetween(0, 10); + List results = new ArrayList<>(); + for (int i = 0; i getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + /** + * Test parsing {@link SimulateDocumentVerboseResult} with inner failures as they don't support asserting on xcontent + * equivalence, given that exceptions are not parsed back as the same original class. We run the usual + * {@link AbstractXContentTestCase#testFromXContent()} without failures, and this other test with failures where we + * disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulateDocumentVerboseResultTests::createTestInstanceWithFailures; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields(), + getShuffleFieldsExceptions(), getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java index be448a09db892..60bad4aad460f 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java @@ -21,57 +21,29 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.nullValue; -public class SimulatePipelineResponseTests extends ESTestCase { +public class SimulatePipelineResponseTests extends AbstractXContentTestCase { public 
void testSerialization() throws IOException { boolean isVerbose = randomBoolean(); String id = randomBoolean() ? randomAlphaOfLengthBetween(1, 10) : null; - int numResults = randomIntBetween(1, 10); - List results = new ArrayList<>(numResults); - for (int i = 0; i < numResults; i++) { - boolean isFailure = randomBoolean(); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - if (isVerbose) { - int numProcessors = randomIntBetween(1, 10); - List processorResults = new ArrayList<>(numProcessors); - for (int j = 0; j < numProcessors; j++) { - String processorTag = randomAlphaOfLengthBetween(1, 10); - SimulateProcessorResult processorResult; - if (isFailure) { - processorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); - } else { - processorResult = new SimulateProcessorResult(processorTag, ingestDocument); - } - processorResults.add(processorResult); - } - results.add(new SimulateDocumentVerboseResult(processorResults)); - } else { - results.add(new SimulateDocumentBaseResult(ingestDocument)); - SimulateDocumentBaseResult simulateDocumentBaseResult; - if (isFailure) { - simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); - } else { - simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); - } - results.add(simulateDocumentBaseResult); - } - } - SimulatePipelineResponse response = new SimulatePipelineResponse(id, isVerbose, results); + SimulatePipelineResponse response = createInstance(id, isVerbose, true); BytesStreamOutput out = new BytesStreamOutput(); response.writeTo(out); StreamInput streamInput = out.bytes().streamInput(); @@ -120,4 +92,97 @@ public void testSerialization() throws IOException { } } } + + static SimulatePipelineResponse createInstance(String pipelineId, boolean isVerbose, boolean withFailure) { + int numResults = randomIntBetween(1, 10); + List results = new ArrayList<>(numResults); + for (int i = 0; i < numResults; i++) { + if (isVerbose) { + results.add( + SimulateDocumentVerboseResultTests.createTestInstance(withFailure) + ); + } else { + results.add( + SimulateDocumentBaseResultTests.createTestInstance(withFailure && randomBoolean()) + ); + } + } + return new SimulatePipelineResponse(pipelineId, isVerbose, results); + } + + private static SimulatePipelineResponse createTestInstanceWithFailures() { + boolean isVerbose = randomBoolean(); + return createInstance(null, isVerbose, false); + } + + @Override + protected SimulatePipelineResponse createTestInstance() { + boolean isVerbose = randomBoolean(); + // since the pipeline id is not serialized with XContent we set it to null for equality tests. 
+ // we test failures separately since comparing XContent is not possible with failures + return createInstance(null, isVerbose, false); + } + + @Override + protected SimulatePipelineResponse doParseInstance(XContentParser parser) { + return SimulatePipelineResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected void assertEqualInstances(SimulatePipelineResponse response, + SimulatePipelineResponse parsedResponse) { + assertEquals(response.getPipelineId(), parsedResponse.getPipelineId()); + assertEquals(response.isVerbose(), parsedResponse.isVerbose()); + assertEquals(response.getResults().size(), parsedResponse.getResults().size()); + for (int i=0; i < response.getResults().size(); i++) { + if (response.isVerbose()) { + assertThat(response.getResults().get(i), instanceOf(SimulateDocumentVerboseResult.class)); + assertThat(parsedResponse.getResults().get(i), instanceOf(SimulateDocumentVerboseResult.class)); + SimulateDocumentVerboseResult responseResult = (SimulateDocumentVerboseResult)response.getResults().get(i); + SimulateDocumentVerboseResult parsedResult = (SimulateDocumentVerboseResult)parsedResponse.getResults().get(i); + SimulateDocumentVerboseResultTests.assertEqualDocs(responseResult, parsedResult); + } else { + assertThat(response.getResults().get(i), instanceOf(SimulateDocumentBaseResult.class)); + assertThat(parsedResponse.getResults().get(i), instanceOf(SimulateDocumentBaseResult.class)); + SimulateDocumentBaseResult responseResult = (SimulateDocumentBaseResult)response.getResults().get(i); + SimulateDocumentBaseResult parsedResult = (SimulateDocumentBaseResult)parsedResponse.getResults().get(i); + SimulateDocumentBaseResultTests.assertEqualDocs(responseResult, parsedResult); + } + } + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + /** + * Test parsing {@link SimulatePipelineResponse} with inner failures as they don't support asserting on xcontent equivalence, given that + * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. 
+ */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulatePipelineResponseTests::createTestInstanceWithFailures; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields(), getShuffleFieldsExceptions(), + getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java index 3014a1a4ae61d..2e0d6a75749bb 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java @@ -21,35 +21,29 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; +import static org.elasticsearch.action.ingest.WriteableIngestDocumentTests.createRandomIngestDoc; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -public class SimulateProcessorResultTests extends ESTestCase { +public class SimulateProcessorResultTests extends AbstractXContentTestCase { public void testSerialization() throws IOException { - String processorTag = randomAlphaOfLengthBetween(1, 10); boolean isSuccessful = randomBoolean(); boolean isIgnoredException = randomBoolean(); - SimulateProcessorResult simulateProcessorResult; - if (isSuccessful) { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - if (isIgnoredException) { - simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument, new IllegalArgumentException("test")); - } else { - simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument); - } - } else { - simulateProcessorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); - } + SimulateProcessorResult simulateProcessorResult = createTestInstance(isSuccessful, isIgnoredException); BytesStreamOutput out = new BytesStreamOutput(); simulateProcessorResult.writeTo(out); @@ -72,4 +66,96 @@ public void testSerialization() throws IOException { assertThat(e.getMessage(), equalTo("test")); } } + + static SimulateProcessorResult createTestInstance(boolean isSuccessful, + boolean isIgnoredException) { + String processorTag = randomAlphaOfLengthBetween(1, 10); + SimulateProcessorResult simulateProcessorResult; + if (isSuccessful) { + IngestDocument ingestDocument = createRandomIngestDoc(); + if (isIgnoredException) { + simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument, new 
IllegalArgumentException("test")); + } else { + simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument); + } + } else { + simulateProcessorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); + } + return simulateProcessorResult; + } + + private static SimulateProcessorResult createTestInstanceWithFailures() { + boolean isSuccessful = randomBoolean(); + boolean isIgnoredException = randomBoolean(); + return createTestInstance(isSuccessful, isIgnoredException); + } + + @Override + protected SimulateProcessorResult createTestInstance() { + // we test failures separately since comparing XContent is not possible with failures + return createTestInstance(true, false); + } + + @Override + protected SimulateProcessorResult doParseInstance(XContentParser parser) { + return SimulateProcessorResult.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + static void assertEqualProcessorResults(SimulateProcessorResult response, + SimulateProcessorResult parsedResponse) { + assertEquals(response.getProcessorTag(), parsedResponse.getProcessorTag()); + assertEquals(response.getIngestDocument(), parsedResponse.getIngestDocument()); + if (response.getFailure() != null ) { + assertNotNull(parsedResponse.getFailure()); + assertThat( + parsedResponse.getFailure().getMessage(), + containsString(response.getFailure().getMessage()) + ); + } else { + assertNull(parsedResponse.getFailure()); + } + } + + @Override + protected void assertEqualInstances(SimulateProcessorResult response, SimulateProcessorResult parsedResponse) { + assertEqualProcessorResults(response, parsedResponse); + } + + /** + * Test parsing {@link SimulateProcessorResult} with inner failures as they don't support asserting on xcontent equivalence, given that + * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulateProcessorResultTests::createTestInstanceWithFailures; + //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, + //but that does not bother our assertions, as we only want to test that we don't break. 
+ boolean supportsUnknownFields = true; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, + getShuffleFieldsExceptions(), getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java index 4d8e0f544c458..bc4589ff5d36c 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java @@ -25,14 +25,19 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.test.RandomObjects; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.StringJoiner; +import java.util.function.Predicate; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; @@ -40,7 +45,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -public class WriteableIngestDocumentTests extends ESTestCase { +public class WriteableIngestDocumentTests extends AbstractXContentTestCase { public void testEqualsAndHashcode() throws Exception { Map sourceAndMetadata = RandomDocumentPicks.randomSource(random()); @@ -147,4 +152,42 @@ public void testToXContent() throws IOException { IngestDocument serializedIngestDocument = new IngestDocument(toXContentSource, toXContentIngestMetadata); assertThat(serializedIngestDocument, equalTo(serializedIngestDocument)); } + + static IngestDocument createRandomIngestDoc() { + XContentType xContentType = randomFrom(XContentType.values()); + BytesReference sourceBytes = RandomObjects.randomSource(random(), xContentType); + Map randomSource = XContentHelper.convertToMap(sourceBytes, false, xContentType).v2(); + return RandomDocumentPicks.randomIngestDocument(random(), randomSource); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected WriteableIngestDocument createTestInstance() { + return new WriteableIngestDocument(createRandomIngestDoc()); + } + + @Override + protected WriteableIngestDocument doParseInstance(XContentParser parser) { + return WriteableIngestDocument.fromXContent(parser); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + 
.add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } } From 8ae2049889766e4c8cbe67bd3f0d1d9998c542a5 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 Jun 2018 13:46:48 +0200 Subject: [PATCH 72/92] Avoid deprecation warning when running the ML datafeed extractor. (#31463) In #29639 we added a `format` option to doc-value fields and deprecated usage of doc-value fields without a format so that we could migrate doc-value fields to use the format that comes with the mappings by default. However I missed to fix the machine-learning datafeed extractor. --- .../ml/datafeed/extractor/scroll/ExtractedField.java | 8 +++++++- .../ml/datafeed/extractor/scroll/ScrollDataExtractor.java | 8 +++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java index c2d866563d638..ef0dffa269114 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java @@ -103,7 +103,13 @@ public Object[] value(SearchHit hit) { if (value.length != 1) { return value; } - value[0] = ((BaseDateTime) value[0]).getMillis(); + if (value[0] instanceof String) { // doc_value field with the epoch_millis format + value[0] = Long.parseLong((String) value[0]); + } else if (value[0] instanceof BaseDateTime) { // script field + value[0] = ((BaseDateTime) value[0]).getMillis(); + } else { + throw new IllegalStateException("Unexpected value for a time field: " + value[0].getClass()); + } return value; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index bbd9f9ad533aa..57681a0aafbb2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -20,6 +20,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.fetch.StoredFieldsContext; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; @@ -47,6 +48,7 @@ class ScrollDataExtractor implements DataExtractor { private static final Logger LOGGER = Loggers.getLogger(ScrollDataExtractor.class); private static final TimeValue SCROLL_TIMEOUT = new TimeValue(30, TimeUnit.MINUTES); + private static final String EPOCH_MILLIS_FORMAT = "epoch_millis"; private final Client client; private final ScrollDataExtractorContext context; @@ -115,7 +117,11 @@ private SearchRequestBuilder buildSearchRequest(long start) { context.query, context.extractedFields.timeField(), start, context.end)); for (String docValueField : context.extractedFields.getDocValueFields()) { - searchRequestBuilder.addDocValueField(docValueField); + if (docValueField.equals(context.extractedFields.timeField())) { + searchRequestBuilder.addDocValueField(docValueField, EPOCH_MILLIS_FORMAT); + } else { + 
searchRequestBuilder.addDocValueField(docValueField, DocValueFieldsContext.USE_DEFAULT_FORMAT); + } } String[] sourceFields = context.extractedFields.getSourceFields(); if (sourceFields.length == 0) { From f22f91c57a6199ddadb19b2bf839d3ac7c3e2fbd Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Fri, 22 Jun 2018 15:31:23 +0200 Subject: [PATCH 73/92] Allow multiple unicast host providers (#31509) Introduces support for multiple host providers, which allows the settings based hosts resolver to be treated just as any other UnicastHostsProvider. Also introduces the notion of a HostsResolver so that plugins such as FileBasedDiscovery do not need to create their own thread pool for resolving hosts, making it easier to add new similar kind of plugins. --- .../classic/AzureUnicastHostsProvider.java | 2 +- .../ec2/AwsEc2UnicastHostsProvider.java | 2 +- .../discovery/ec2/Ec2DiscoveryTests.java | 8 +- .../file/FileBasedDiscoveryPlugin.java | 48 +----------- .../file/FileBasedUnicastHostsProvider.java | 34 +-------- .../FileBasedUnicastHostsProviderTests.java | 12 ++- .../gce/GceUnicastHostsProvider.java | 2 +- .../discovery/gce/GceDiscoveryTests.java | 2 +- .../common/settings/ClusterSettings.java | 3 +- .../discovery/DiscoveryModule.java | 47 ++++++++---- .../zen/SettingsBasedHostsProvider.java | 75 +++++++++++++++++++ .../discovery/zen/UnicastHostsProvider.java | 12 ++- .../discovery/zen/UnicastZenPing.java | 64 ++++++---------- .../discovery/DiscoveryModuleTests.java | 36 ++++++++- .../single/SingleNodeDiscoveryIT.java | 2 +- .../discovery/zen/UnicastZenPingTests.java | 45 ++++++----- .../discovery/zen/ZenDiscoveryUnitTests.java | 2 +- .../discovery/MockUncasedHostProvider.java | 2 +- .../test/discovery/TestZenDiscovery.java | 4 +- 19 files changed, 224 insertions(+), 178 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java index 482dafb008fc5..1a9265de2a72f 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java @@ -132,7 +132,7 @@ public AzureUnicastHostsProvider(Settings settings, AzureComputeService azureCom * Setting `cloud.azure.refresh_interval` to `0` will disable caching (default). 
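/*
 * A minimal sketch (assumed index and field names, not part of the patches above) of the
 * formatted doc-value field request that the ScrollDataExtractor change earlier in this series
 * relies on: the time field is fetched as epoch_millis, every other doc-value field keeps the
 * format configured in the mappings, so no deprecation warning is emitted.
 */
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;

class FormattedDocValueFieldSketch {

    static SearchRequestBuilder timeAwareSearch(Client client) {
        return client.prepareSearch("my-index")                                    // assumed index name
            .addDocValueField("timestamp", "epoch_millis")                         // time field returned as epoch millis (a String)
            .addDocValueField("status", DocValueFieldsContext.USE_DEFAULT_FORMAT); // keep the mapping's own format
    }
}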
*/ @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { if (refreshInterval.millis() != 0) { if (dynamicHosts != null && (refreshInterval.millis() < 0 || (System.currentTimeMillis() - lastRefresh) < refreshInterval.millis())) { diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index 396e9f707d404..8f5037042986b 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -92,7 +92,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos } @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { return dynamicHosts.getOrRefresh(); } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index 9dc2e02edc1b5..295df0c818a91 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -93,7 +93,7 @@ protected List buildDynamicHosts(Settings nodeSettings, int no protected List buildDynamicHosts(Settings nodeSettings, int nodes, List> tagsList) { try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY, nodes, tagsList)) { AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, plugin.ec2Service); - List dynamicHosts = provider.buildDynamicHosts(); + List dynamicHosts = provider.buildDynamicHosts(null); logger.debug("--> addresses found: {}", dynamicHosts); return dynamicHosts; } catch (IOException e) { @@ -307,7 +307,7 @@ protected List fetchDynamicNodes() { } }; for (int i=0; i<3; i++) { - provider.buildDynamicHosts(); + provider.buildDynamicHosts(null); } assertThat(provider.fetchCount, is(3)); } @@ -324,12 +324,12 @@ protected List fetchDynamicNodes() { } }; for (int i=0; i<3; i++) { - provider.buildDynamicHosts(); + provider.buildDynamicHosts(null); } assertThat(provider.fetchCount, is(1)); Thread.sleep(1_000L); // wait for cache to expire for (int i=0; i<3; i++) { - provider.buildDynamicHosts(); + provider.buildDynamicHosts(null); } assertThat(provider.fetchCount, is(2)); } diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java index fb37b3bc01104..4d26447078597 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java @@ -19,35 +19,17 @@ package org.elasticsearch.discovery.file; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; -import 
org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.discovery.zen.UnicastHostsProvider; -import org.elasticsearch.discovery.zen.UnicastZenPing; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.node.Node; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.watcher.ResourceWatcherService; -import java.io.IOException; import java.nio.file.Path; -import java.util.Collection; import java.util.Collections; import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; import java.util.function.Supplier; /** @@ -57,47 +39,19 @@ */ public class FileBasedDiscoveryPlugin extends Plugin implements DiscoveryPlugin { - private static final Logger logger = Loggers.getLogger(FileBasedDiscoveryPlugin.class); - private final Settings settings; private final Path configPath; - private ExecutorService fileBasedDiscoveryExecutorService; public FileBasedDiscoveryPlugin(Settings settings, Path configPath) { this.settings = settings; this.configPath = configPath; } - @Override - public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, ScriptService scriptService, - NamedXContentRegistry xContentRegistry, Environment environment, - NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { - final int concurrentConnects = UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings); - final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings, "[file_based_discovery_resolve]"); - fileBasedDiscoveryExecutorService = EsExecutors.newScaling( - Node.NODE_NAME_SETTING.get(settings) + "/" + "file_based_discovery_resolve", - 0, - concurrentConnects, - 60, - TimeUnit.SECONDS, - threadFactory, - threadPool.getThreadContext()); - - return Collections.emptyList(); - } - - @Override - public void close() throws IOException { - ThreadPool.terminate(fileBasedDiscoveryExecutorService, 0, TimeUnit.SECONDS); - } - @Override public Map> getZenHostsProviders(TransportService transportService, NetworkService networkService) { return Collections.singletonMap( "file", - () -> new FileBasedUnicastHostsProvider( - new Environment(settings, configPath), transportService, fileBasedDiscoveryExecutorService)); + () -> new FileBasedUnicastHostsProvider(new Environment(settings, configPath))); } } diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java index 7abcb4454720c..584ae4de5a2b5 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java @@ -23,26 +23,19 @@ import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.unit.TimeValue; import 
org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.env.Environment; -import org.elasticsearch.transport.TransportService; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; -import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.concurrent.ExecutorService; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.discovery.zen.UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT; -import static org.elasticsearch.discovery.zen.UnicastZenPing.resolveHostsLists; - /** * An implementation of {@link UnicastHostsProvider} that reads hosts/ports * from {@link #UNICAST_HOSTS_FILE}. @@ -59,23 +52,15 @@ class FileBasedUnicastHostsProvider extends AbstractComponent implements Unicast static final String UNICAST_HOSTS_FILE = "unicast_hosts.txt"; - private final TransportService transportService; - private final ExecutorService executorService; - private final Path unicastHostsFilePath; - private final TimeValue resolveTimeout; - - FileBasedUnicastHostsProvider(Environment environment, TransportService transportService, ExecutorService executorService) { + FileBasedUnicastHostsProvider(Environment environment) { super(environment.settings()); - this.transportService = transportService; - this.executorService = executorService; this.unicastHostsFilePath = environment.configFile().resolve("discovery-file").resolve(UNICAST_HOSTS_FILE); - this.resolveTimeout = DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT.get(settings); } @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { List hostsList; try (Stream lines = Files.lines(unicastHostsFilePath)) { hostsList = lines.filter(line -> line.startsWith("#") == false) // lines starting with `#` are comments @@ -90,21 +75,8 @@ public List buildDynamicHosts() { hostsList = Collections.emptyList(); } - final List dynamicHosts = new ArrayList<>(); - try { - dynamicHosts.addAll(resolveHostsLists( - executorService, - logger, - hostsList, - 1, - transportService, - resolveTimeout)); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - + final List dynamicHosts = hostsResolver.resolveHosts(hostsList, 1); logger.debug("[discovery-file] Using dynamic discovery nodes {}", dynamicHosts); - return dynamicHosts; } diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java index 860d3537635d5..5837d3bcdfe3f 100644 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java +++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java @@ -24,7 +24,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.discovery.zen.UnicastZenPing; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -123,8 +125,10 @@ public void 
testUnicastHostsDoesNotExist() throws Exception { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); final Environment environment = TestEnvironment.newEnvironment(settings); - final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(environment, transportService, executorService); - final List addresses = provider.buildDynamicHosts(); + final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(environment); + final List addresses = provider.buildDynamicHosts((hosts, limitPortCounts) -> + UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService, + TimeValue.timeValueSeconds(10))); assertEquals(0, addresses.size()); } @@ -163,6 +167,8 @@ private List setupAndRunHostProvider(final List hostEn } return new FileBasedUnicastHostsProvider( - new Environment(settings, configPath), transportService, executorService).buildDynamicHosts(); + new Environment(settings, configPath)).buildDynamicHosts((hosts, limitPortCounts) -> + UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService, + TimeValue.timeValueSeconds(10))); } } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index 790d70a8b99b0..778c38697c5ec 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -93,7 +93,7 @@ public GceUnicastHostsProvider(Settings settings, GceInstancesService gceInstanc * Information can be cached using `cloud.gce.refresh_interval` property if needed. */ @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { // We check that needed properties have been set if (this.project == null || this.project.isEmpty() || this.zones == null || this.zones.isEmpty()) { throw new IllegalArgumentException("one or more gce discovery settings are missing. 
" + diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index a1944a15d8036..816152186e761 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -108,7 +108,7 @@ protected List buildDynamicNodes(GceInstancesServiceImpl gceIn GceUnicastHostsProvider provider = new GceUnicastHostsProvider(nodeSettings, gceInstancesService, transportService, new NetworkService(Collections.emptyList())); - List dynamicHosts = provider.buildDynamicHosts(); + List dynamicHosts = provider.buildDynamicHosts(null); logger.info("--> addresses found: {}", dynamicHosts); return dynamicHosts; } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index e616613a425a9..478325c66f983 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -56,6 +56,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ElectMasterService; import org.elasticsearch.discovery.zen.FaultDetection; +import org.elasticsearch.discovery.zen.SettingsBasedHostsProvider; import org.elasticsearch.discovery.zen.UnicastZenPing; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.env.Environment; @@ -357,7 +358,7 @@ public void apply(Settings value, Settings current, Settings previous) { ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING, ZenDiscovery.MASTER_ELECTION_IGNORE_NON_MASTER_PINGS_SETTING, ZenDiscovery.MAX_PENDING_CLUSTER_STATES_SETTING, - UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, + SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING, UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT, SearchService.DEFAULT_KEEPALIVE_SETTING, diff --git a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index 179692cd516c8..e47fe7a7a70ed 100644 --- a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -31,7 +31,9 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.single.SingleNodeDiscovery; +import org.elasticsearch.discovery.zen.SettingsBasedHostsProvider; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.plugins.DiscoveryPlugin; @@ -42,13 +44,15 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; +import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Supplier; +import java.util.stream.Collectors; /** * A module for loading classes for node discovery. 
@@ -57,8 +61,8 @@ public class DiscoveryModule { public static final Setting DISCOVERY_TYPE_SETTING = new Setting<>("discovery.type", "zen", Function.identity(), Property.NodeScope); - public static final Setting> DISCOVERY_HOSTS_PROVIDER_SETTING = - new Setting<>("discovery.zen.hosts_provider", (String)null, Optional::ofNullable, Property.NodeScope); + public static final Setting> DISCOVERY_HOSTS_PROVIDER_SETTING = + Setting.listSetting("discovery.zen.hosts_provider", Collections.emptyList(), Function.identity(), Property.NodeScope); private final Discovery discovery; @@ -66,9 +70,9 @@ public DiscoveryModule(Settings settings, ThreadPool threadPool, TransportServic NamedWriteableRegistry namedWriteableRegistry, NetworkService networkService, MasterService masterService, ClusterApplier clusterApplier, ClusterSettings clusterSettings, List plugins, AllocationService allocationService) { - final UnicastHostsProvider hostsProvider; final Collection> joinValidators = new ArrayList<>(); - Map> hostProviders = new HashMap<>(); + final Map> hostProviders = new HashMap<>(); + hostProviders.put("settings", () -> new SettingsBasedHostsProvider(settings, transportService)); for (DiscoveryPlugin plugin : plugins) { plugin.getZenHostsProviders(transportService, networkService).entrySet().forEach(entry -> { if (hostProviders.put(entry.getKey(), entry.getValue()) != null) { @@ -80,17 +84,32 @@ public DiscoveryModule(Settings settings, ThreadPool threadPool, TransportServic joinValidators.add(joinValidator); } } - Optional hostsProviderName = DISCOVERY_HOSTS_PROVIDER_SETTING.get(settings); - if (hostsProviderName.isPresent()) { - Supplier hostsProviderSupplier = hostProviders.get(hostsProviderName.get()); - if (hostsProviderSupplier == null) { - throw new IllegalArgumentException("Unknown zen hosts provider [" + hostsProviderName.get() + "]"); - } - hostsProvider = Objects.requireNonNull(hostsProviderSupplier.get()); - } else { - hostsProvider = Collections::emptyList; + List hostsProviderNames = DISCOVERY_HOSTS_PROVIDER_SETTING.get(settings); + // for bwc purposes, add settings provider even if not explicitly specified + if (hostsProviderNames.contains("settings") == false) { + List extendedHostsProviderNames = new ArrayList<>(); + extendedHostsProviderNames.add("settings"); + extendedHostsProviderNames.addAll(hostsProviderNames); + hostsProviderNames = extendedHostsProviderNames; + } + + final Set missingProviderNames = new HashSet<>(hostsProviderNames); + missingProviderNames.removeAll(hostProviders.keySet()); + if (missingProviderNames.isEmpty() == false) { + throw new IllegalArgumentException("Unknown zen hosts providers " + missingProviderNames); } + List filteredHostsProviders = hostsProviderNames.stream() + .map(hostProviders::get).map(Supplier::get).collect(Collectors.toList()); + + final UnicastHostsProvider hostsProvider = hostsResolver -> { + final List addresses = new ArrayList<>(); + for (UnicastHostsProvider provider : filteredHostsProviders) { + addresses.addAll(provider.buildDynamicHosts(hostsResolver)); + } + return Collections.unmodifiableList(addresses); + }; + Map> discoveryTypes = new HashMap<>(); discoveryTypes.put("zen", () -> new ZenDiscovery(settings, threadPool, transportService, namedWriteableRegistry, masterService, clusterApplier, diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java new file mode 100644 index 0000000000000..6d6453c776e68 --- 
/dev/null +++ b/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.zen; + +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.transport.TransportService; + +import java.util.List; +import java.util.function.Function; + +import static java.util.Collections.emptyList; + +/** + * An implementation of {@link UnicastHostsProvider} that reads hosts/ports + * from the "discovery.zen.ping.unicast.hosts" node setting. If the port is + * left off an entry, a default port of 9300 is assumed. + * + * An example unicast hosts setting might look as follows: + * [67.81.244.10, 67.81.244.11:9305, 67.81.244.15:9400] + */ +public class SettingsBasedHostsProvider extends AbstractComponent implements UnicastHostsProvider { + + public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = + Setting.listSetting("discovery.zen.ping.unicast.hosts", emptyList(), Function.identity(), Setting.Property.NodeScope); + + // these limits are per-address + public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; + public static final int LIMIT_LOCAL_PORTS_COUNT = 5; + + private final List configuredHosts; + + private final int limitPortCounts; + + public SettingsBasedHostsProvider(Settings settings, TransportService transportService) { + super(settings); + + if (DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.exists(settings)) { + configuredHosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); + // we only limit to 1 address, makes no sense to ping 100 ports + limitPortCounts = LIMIT_FOREIGN_PORTS_COUNT; + } else { + // if unicast hosts are not specified, fill with simple defaults on the local machine + configuredHosts = transportService.getLocalAddresses(); + limitPortCounts = LIMIT_LOCAL_PORTS_COUNT; + } + + logger.debug("using initial hosts {}", configuredHosts); + } + + @Override + public List buildDynamicHosts(HostsResolver hostsResolver) { + return hostsResolver.resolveHosts(configuredHosts, limitPortCounts); + } + +} diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java index d719f9d123b8c..86410005c92bf 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java @@ -31,5 +31,15 @@ public interface UnicastHostsProvider { /** * Builds the dynamic list of unicast hosts to be used for unicast discovery. 
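/*
 * A minimal sketch (hypothetical class and fixed host list, not part of the patch) of a
 * provider written against the new contract introduced in this change: instead of resolving
 * host names itself, the provider hands its host strings to the shared HostsResolver that
 * UnicastZenPing supplies.
 */
import java.util.Arrays;
import java.util.List;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.discovery.zen.UnicastHostsProvider;

class StaticListHostsProvider implements UnicastHostsProvider {

    private final List<String> hosts = Arrays.asList("10.0.0.1", "10.0.0.2:9301"); // assumed seed addresses

    @Override
    public List<TransportAddress> buildDynamicHosts(HostsResolver hostsResolver) {
        // resolve each entry to at most one transport address, mirroring the foreign-port limit
        return hostsResolver.resolveHosts(hosts, 1);
    }
}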
*/ - List buildDynamicHosts(); + List buildDynamicHosts(HostsResolver hostsResolver); + + /** + * Helper object that allows to resolve a list of hosts to a list of transport addresses. + * Each host is resolved into a transport address (or a collection of addresses if the + * number of ports is greater than one) + */ + interface HostsResolver { + List resolveHosts(List hosts, int limitPortCounts); + } + } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index cbadbb4a1e09b..9c86fa17e9b06 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -82,11 +82,9 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; -import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; -import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; @@ -94,26 +92,15 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { public static final String ACTION_NAME = "internal:discovery/zen/unicast"; - public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = - Setting.listSetting("discovery.zen.ping.unicast.hosts", emptyList(), Function.identity(), - Property.NodeScope); public static final Setting DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING = Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, Property.NodeScope); public static final Setting DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT = Setting.positiveTimeSetting("discovery.zen.ping.unicast.hosts.resolve_timeout", TimeValue.timeValueSeconds(5), Property.NodeScope); - // these limits are per-address - public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; - public static final int LIMIT_LOCAL_PORTS_COUNT = 5; - private final ThreadPool threadPool; private final TransportService transportService; private final ClusterName clusterName; - private final List configuredHosts; - - private final int limitPortCounts; - private final PingContextProvider contextProvider; private final AtomicInteger pingingRoundIdGenerator = new AtomicInteger(); @@ -141,19 +128,10 @@ public UnicastZenPing(Settings settings, ThreadPool threadPool, TransportService this.contextProvider = contextProvider; final int concurrentConnects = DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings); - if (DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.exists(settings)) { - configuredHosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); - // we only limit to 1 addresses, makes no sense to ping 100 ports - limitPortCounts = LIMIT_FOREIGN_PORTS_COUNT; - } else { - // if unicast hosts are not specified, fill with simple defaults on the local machine - configuredHosts = transportService.getLocalAddresses(); - limitPortCounts = LIMIT_LOCAL_PORTS_COUNT; - } + resolveTimeout = DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT.get(settings); logger.debug( - "using initial hosts {}, with concurrent_connects [{}], resolve_timeout [{}]", - configuredHosts, + "using concurrent_connects [{}], resolve_timeout [{}]", concurrentConnects, resolveTimeout); @@ -172,9 +150,9 @@ public UnicastZenPing(Settings 
settings, ThreadPool threadPool, TransportService } /** - * Resolves a list of hosts to a list of discovery nodes. Each host is resolved into a transport address (or a collection of addresses - * if the number of ports is greater than one) and the transport addresses are used to created discovery nodes. Host lookups are done - * in parallel using specified executor service up to the specified resolve timeout. + * Resolves a list of hosts to a list of transport addresses. Each host is resolved into a transport address (or a collection of + * addresses if the number of ports is greater than one). Host lookups are done in parallel using specified executor service up + * to the specified resolve timeout. * * @param executorService the executor service used to parallelize hostname lookups * @param logger logger used for logging messages regarding hostname lookups @@ -190,7 +168,7 @@ public static List resolveHostsLists( final List hosts, final int limitPortCounts, final TransportService transportService, - final TimeValue resolveTimeout) throws InterruptedException { + final TimeValue resolveTimeout) { Objects.requireNonNull(executorService); Objects.requireNonNull(logger); Objects.requireNonNull(hosts); @@ -205,8 +183,13 @@ public static List resolveHostsLists( .stream() .map(hn -> (Callable) () -> transportService.addressesFromString(hn, limitPortCounts)) .collect(Collectors.toList()); - final List> futures = - executorService.invokeAll(callables, resolveTimeout.nanos(), TimeUnit.NANOSECONDS); + final List> futures; + try { + futures = executorService.invokeAll(callables, resolveTimeout.nanos(), TimeUnit.NANOSECONDS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return Collections.emptyList(); + } final List transportAddresses = new ArrayList<>(); final Set localAddresses = new HashSet<>(); localAddresses.add(transportService.boundAddress().publishAddress()); @@ -232,6 +215,9 @@ public static List resolveHostsLists( assert e.getCause() != null; final String message = "failed to resolve host [" + hostname + "]"; logger.warn(message, e.getCause()); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + // ignore } } else { logger.warn("timed out after [{}] resolving host [{}]", resolveTimeout, hostname); @@ -240,6 +226,11 @@ public static List resolveHostsLists( return Collections.unmodifiableList(transportAddresses); } + private UnicastHostsProvider.HostsResolver createHostsResolver() { + return (hosts, limitPortCounts) -> resolveHostsLists(unicastZenPingExecutorService, logger, hosts, + limitPortCounts, transportService, resolveTimeout); + } + @Override public void close() { ThreadPool.terminate(unicastZenPingExecutorService, 10, TimeUnit.SECONDS); @@ -281,18 +272,7 @@ protected void ping(final Consumer resultsConsumer, final TimeValue scheduleDuration, final TimeValue requestDuration) { final List seedAddresses = new ArrayList<>(); - try { - seedAddresses.addAll(resolveHostsLists( - unicastZenPingExecutorService, - logger, - configuredHosts, - limitPortCounts, - transportService, - resolveTimeout)); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - seedAddresses.addAll(hostsProvider.buildDynamicHosts()); + seedAddresses.addAll(hostsProvider.buildDynamicHosts(createHostsResolver())); final DiscoveryNodes nodes = contextProvider.clusterState().nodes(); // add all possible master nodes that were active in the last known cluster configuration for (ObjectCursor masterNode : nodes.getMasterNodes().values()) { diff 
--git a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java index 18829d515973d..f2491b2db1f9a 100644 --- a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java @@ -137,11 +137,10 @@ public void testDuplicateDiscovery() { public void testHostsProvider() { Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "custom").build(); - final UnicastHostsProvider provider = Collections::emptyList; AtomicBoolean created = new AtomicBoolean(false); DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("custom", () -> { created.set(true); - return Collections::emptyList; + return hostsResolver -> Collections.emptyList(); }); newModule(settings, Collections.singletonList(plugin)); assertTrue(created.get()); @@ -151,7 +150,7 @@ public void testUnknownHostsProvider() { Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "dne").build(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> newModule(settings, Collections.emptyList())); - assertEquals("Unknown zen hosts provider [dne]", e.getMessage()); + assertEquals("Unknown zen hosts providers [dne]", e.getMessage()); } public void testDuplicateHostsProvider() { @@ -162,6 +161,37 @@ public void testDuplicateHostsProvider() { assertEquals("Cannot register zen hosts provider [dup] twice", e.getMessage()); } + public void testSettingsHostsProvider() { + DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("settings", () -> null); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + newModule(Settings.EMPTY, Arrays.asList(plugin))); + assertEquals("Cannot register zen hosts provider [settings] twice", e.getMessage()); + } + + public void testMultiHostsProvider() { + AtomicBoolean created1 = new AtomicBoolean(false); + DummyHostsProviderPlugin plugin1 = () -> Collections.singletonMap("provider1", () -> { + created1.set(true); + return hostsResolver -> Collections.emptyList(); + }); + AtomicBoolean created2 = new AtomicBoolean(false); + DummyHostsProviderPlugin plugin2 = () -> Collections.singletonMap("provider2", () -> { + created2.set(true); + return hostsResolver -> Collections.emptyList(); + }); + AtomicBoolean created3 = new AtomicBoolean(false); + DummyHostsProviderPlugin plugin3 = () -> Collections.singletonMap("provider3", () -> { + created3.set(true); + return hostsResolver -> Collections.emptyList(); + }); + Settings settings = Settings.builder().putList(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), + "provider1", "provider3").build(); + newModule(settings, Arrays.asList(plugin1, plugin2, plugin3)); + assertTrue(created1.get()); + assertFalse(created2.get()); + assertTrue(created3.get()); + } + public void testLazyConstructionHostsProvider() { DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("custom", () -> { diff --git a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java index 33c87ea7f383e..c3ffbb82081b7 100644 --- a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java @@ -84,7 +84,7 @@ public void 
testDoesNotRespondToZenPings() throws Exception { internalCluster().getInstance(TransportService.class); // try to ping the single node directly final UnicastHostsProvider provider = - () -> Collections.singletonList(nodeTransport.getLocalNode().getAddress()); + hostsResolver -> Collections.singletonList(nodeTransport.getLocalNode().getAddress()); final CountDownLatch latch = new CountDownLatch(1); final DiscoveryNodes nodes = DiscoveryNodes.builder() .add(nodeTransport.getLocalNode()) diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 4aa75077431e7..eef926a1e1238 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -137,8 +137,6 @@ public void tearDown() throws Exception { } } - private static final UnicastHostsProvider EMPTY_HOSTS_PROVIDER = Collections::emptyList; - public void testSimplePings() throws IOException, InterruptedException, ExecutionException { // use ephemeral ports final Settings settings = Settings.builder().put("cluster.name", "test").put(TcpTransport.PORT.getKey(), 0).build(); @@ -182,7 +180,7 @@ public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfil final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build(); final ClusterState stateMismatch = ClusterState.builder(new ClusterName("mismatch")).version(randomNonNegativeLong()).build(); - Settings hostsSettings = Settings.builder() + final Settings hostsSettings = Settings.builder() .putList("discovery.zen.ping.unicast.hosts", NetworkAddress.format(new InetSocketAddress(handleA.address.address().getAddress(), handleA.address.address().getPort())), NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())), @@ -196,22 +194,21 @@ public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfil .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).localNodeId("UZP_A")) .build(); - TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); ClusterState stateB = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")) .build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); ClusterState stateC = ClusterState.builder(stateMismatch) .nodes(DiscoveryNodes.builder().add(handleC.node).localNodeId("UZP_C")) .build(); - TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleC, - EMPTY_HOSTS_PROVIDER, () -> stateC) { + TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleC, () -> stateC) { @Override protected Version getVersion() { return versionD; @@ -223,8 +220,7 @@ protected Version getVersion() { ClusterState stateD = ClusterState.builder(stateMismatch) 
.nodes(DiscoveryNodes.builder().add(handleD.node).localNodeId("UZP_D")) .build(); - TestUnicastZenPing zenPingD = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleD, - EMPTY_HOSTS_PROVIDER, () -> stateD); + TestUnicastZenPing zenPingD = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleD, () -> stateD); zenPingD.start(); closeables.push(zenPingD); @@ -329,21 +325,21 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).localNodeId("UZP_A")) .build(); - final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); ClusterState stateB = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")) .build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); ClusterState stateC = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleC.node).localNodeId("UZP_C")) .build(); - TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettings, threadPool, handleC, EMPTY_HOSTS_PROVIDER, () -> stateC); + TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettings, threadPool, handleC, () -> stateC); zenPingC.start(); closeables.push(zenPingC); @@ -408,7 +404,7 @@ public BoundTransportAddress boundAddress() { Collections.emptySet()); closeables.push(transportService); final int limitPortCounts = randomIntBetween(1, 10); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList("127.0.0.1"), @@ -452,7 +448,7 @@ public BoundTransportAddress boundAddress() { new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList(NetworkAddress.format(loopbackAddress)), @@ -503,7 +499,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi Collections.emptySet()); closeables.push(transportService); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList(hostname), @@ -562,7 +558,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi closeables.push(transportService); final TimeValue resolveTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 3)); try { - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("hostname1", "hostname2"), @@ -610,6 +606,7 @@ public void testResolveReuseExistingNodeConnections() throws ExecutionException, 
hostsSettingsBuilder.put("discovery.zen.ping.unicast.hosts", (String) null); } final Settings hostsSettings = hostsSettingsBuilder.build(); + final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build(); // connection to reuse @@ -627,14 +624,14 @@ public void onConnectionOpened(Transport.Connection connection) { .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).add(handleB.node).localNodeId("UZP_A")) .build(); - final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); final ClusterState stateB = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")) .build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); @@ -669,19 +666,20 @@ public void testPingingTemporalPings() throws ExecutionException, InterruptedExc .put("cluster.name", "test") .put("discovery.zen.ping.unicast.hosts", (String) null) // use nodes for simplicity .build(); + final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build(); final ClusterState stateA = ClusterState.builder(state) .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).add(handleB.node).localNodeId("UZP_A")).build(); - final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); // Node B doesn't know about A! 
final ClusterState stateB = ClusterState.builder(state).nodes( DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")).build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); @@ -728,7 +726,7 @@ public BoundTransportAddress boundAddress() { new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("127.0.0.1:9300:9300", "127.0.0.1:9301"), @@ -828,9 +826,10 @@ private static class NetworkHandle { private static class TestUnicastZenPing extends UnicastZenPing { TestUnicastZenPing(Settings settings, ThreadPool threadPool, NetworkHandle networkHandle, - UnicastHostsProvider unicastHostsProvider, PingContextProvider contextProvider) { + PingContextProvider contextProvider) { super(Settings.builder().put("node.name", networkHandle.node.getName()).put(settings).build(), - threadPool, networkHandle.transportService, unicastHostsProvider, contextProvider); + threadPool, networkHandle.transportService, + new SettingsBasedHostsProvider(settings, networkHandle.transportService), contextProvider); } volatile CountDownLatch allTasksCompleted; diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index 9273ab1514372..a60a23bcd6d5c 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -317,7 +317,7 @@ public void onNewClusterState(String source, Supplier clusterState } }; ZenDiscovery zenDiscovery = new ZenDiscovery(settings, threadPool, service, new NamedWriteableRegistry(ClusterModule.getNamedWriteables()), - masterService, clusterApplier, clusterSettings, Collections::emptyList, ESAllocationTestCase.createAllocationService(), + masterService, clusterApplier, clusterSettings, hostsResolver -> Collections.emptyList(), ESAllocationTestCase.createAllocationService(), Collections.emptyList()); zenDiscovery.start(); return zenDiscovery; diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java index 2e60a3c518dd3..dc9304637cdca 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java @@ -56,7 +56,7 @@ public MockUncasedHostProvider(Supplier localNodeSupplier, Cluste } @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { final DiscoveryNode localNode = getNode(); assert localNode != null; synchronized (activeNodesPerCluster) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java index 11f9e38e665ff..5387a659aa274 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java
@@ -45,7 +45,7 @@
 import java.util.Map;
 import java.util.function.Supplier;

-import static org.elasticsearch.discovery.zen.UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING;
+import static org.elasticsearch.discovery.zen.SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING;

 /**
  * A alternative zen discovery which allows using mocks for things like pings, as well as
@@ -84,7 +84,7 @@ public Map> getZenHostsProviders(Transpor
         final Supplier supplier;
         if (USE_MOCK_PINGS.get(settings)) {
             // we have to return something in order for the unicast host provider setting to resolve to something. It will never be used
-            supplier = () -> () -> {
+            supplier = () -> hostsResolver -> {
                 throw new UnsupportedOperationException();
             };
         } else {

From c6cbc99f9c90c75e883393dc6b4691033ab73d72 Mon Sep 17 00:00:00 2001
From: Dimitris Athanasiou
Date: Fri, 22 Jun 2018 15:13:31 +0100
Subject: [PATCH 74/92] [ML] Add ML filter update API (#31437)

This adds an API to allow updating a filter:

POST _xpack/ml/filters/{filter_id}/_update

The request body may have:

  - description: setting a new description
  - add_items: a list of the items to add
  - remove_items: a list of the items to remove

This commit also changes the PUT filter API to error when the filter_id
is already used. Now that there is an API for updating filters, the PUT
API should only be used to create new ones.

Also, updating a filter results in a notification message auditing the
change for every job that is using that filter.
---
 .../xpack/core/XPackClientPlugin.java | 2 +
 .../core/ml/action/UpdateFilterAction.java | 187 ++++++++++++++++++
 .../xpack/core/ml/job/config/MlFilter.java | 13 +-
 .../xpack/core/ml/job/messages/Messages.java | 2 +
 .../autodetect/state/ModelSnapshot.java | 4 +-
 .../xpack/core/ml/utils/ExceptionsHelper.java | 4 +
 .../UpdateFilterActionRequestTests.java | 58 ++++++
 .../core/ml/job/config/MlFilterTests.java | 9 +-
 .../xpack/ml/MachineLearning.java | 5 +
 .../ml/action/TransportGetFiltersAction.java | 9 +-
 .../ml/action/TransportPutFilterAction.java | 37 ++--
 .../action/TransportUpdateFilterAction.java | 173 ++++++++++++++++
 .../xpack/ml/job/JobManager.java | 53 +++--
 .../persistence/BatchedBucketsIterator.java | 4 +-
 .../BatchedInfluencersIterator.java | 4 +-
 .../persistence/BatchedRecordsIterator.java | 4 +-
 .../xpack/ml/job/persistence/JobProvider.java | 19 +-
 .../rest/filter/RestUpdateFilterAction.java | 41 ++++
 .../xpack/ml/job/JobManagerTests.java | 90 ++++++++-
 .../api/xpack.ml.update_filter.json | 20 ++
 .../test/ml/custom_all_field.yml | 2 +
 .../test/ml/delete_model_snapshot.yml | 2 +
 .../rest-api-spec/test/ml/filter_crud.yml | 68 ++++++-
 .../test/ml/get_model_snapshots.yml | 3 +
 .../rest-api-spec/test/ml/index_layout.yml | 2 +
 .../rest-api-spec/test/ml/jobs_crud.yml | 4 +
 .../test/ml/jobs_get_result_buckets.yml | 3 +
 .../test/ml/jobs_get_result_categories.yml | 3 +
 .../test/ml/jobs_get_result_influencers.yml | 3 +
 .../ml/jobs_get_result_overall_buckets.yml | 9 +
 .../test/ml/jobs_get_result_records.yml | 2 +
 .../rest-api-spec/test/ml/jobs_get_stats.yml | 2 +
 .../test/ml/ml_anomalies_default_mappings.yml | 1 +
 .../test/ml/revert_model_snapshot.yml | 9 +
 .../test/ml/update_model_snapshot.yml | 2 +
 .../ml/integration/DetectionRulesIT.java | 12 +-
 .../smoke-test-ml-with-security/build.gradle | 1 +
 37 files changed, 794 insertions(+), 72
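For illustration only, here is a minimal sketch (not taken from the commit itself) of how such an update request could be assembled through the UpdateFilterAction.Request class added in this patch. The filter id "safe_domains" and the item values are invented examples; note that the transport action introduced later in this patch rejects removal of items that are not present in the filter.

    import java.util.Arrays;
    import java.util.Collections;

    import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction;

    public class UpdateFilterRequestExample {

        // Builds a request equivalent to:
        // POST _xpack/ml/filters/safe_domains/_update
        // { "description": "...", "add_items": [...], "remove_items": [...] }
        public static UpdateFilterAction.Request buildExampleRequest() {
            UpdateFilterAction.Request request = new UpdateFilterAction.Request("safe_domains");
            request.setDescription("Domains considered safe");                 // optional new description
            request.setAddItems(Arrays.asList("*.elastic.co", "example.org")); // items to add to the filter
            request.setRemoveItems(Collections.singletonList("old.example"));  // items to remove; they must already exist in the filter
            // A request that sets none of description/add_items/remove_items is a no-op
            // and simply returns the existing filter.
            assert request.isNoop() == false;
            return request;
        }
    }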
deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 2894138248b8c..0bf6601593dee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -84,6 +84,7 @@ import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateDatafeedAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.UpdateProcessAction; @@ -220,6 +221,7 @@ public List getClientActions() { OpenJobAction.INSTANCE, GetFiltersAction.INSTANCE, PutFilterAction.INSTANCE, + UpdateFilterAction.INSTANCE, DeleteFilterAction.INSTANCE, KillProcessAction.INSTANCE, GetBucketsAction.INSTANCE, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java new file mode 100644 index 0000000000000..57b3d3457d736 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java @@ -0,0 +1,187 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Objects; +import java.util.SortedSet; +import java.util.TreeSet; + + +public class UpdateFilterAction extends Action { + + public static final UpdateFilterAction INSTANCE = new UpdateFilterAction(); + public static final String NAME = "cluster:admin/xpack/ml/filters/update"; + + private UpdateFilterAction() { + super(NAME); + } + + @Override + public PutFilterAction.Response newResponse() { + return new PutFilterAction.Response(); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + public static final ParseField ADD_ITEMS = new ParseField("add_items"); + public static final ParseField REMOVE_ITEMS = new ParseField("remove_items"); + + private static final ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); + + static { + PARSER.declareString((request, filterId) -> request.filterId = filterId, MlFilter.ID); + PARSER.declareStringOrNull(Request::setDescription, MlFilter.DESCRIPTION); + PARSER.declareStringArray(Request::setAddItems, ADD_ITEMS); + PARSER.declareStringArray(Request::setRemoveItems, REMOVE_ITEMS); + } + + public static Request parseRequest(String filterId, XContentParser parser) { + Request request = PARSER.apply(parser, null); + if (request.filterId == null) { + request.filterId = filterId; + } else if (!Strings.isNullOrEmpty(filterId) && !filterId.equals(request.filterId)) { + // If we have both URI and body filter ID, they must be identical + throw new IllegalArgumentException(Messages.getMessage(Messages.INCONSISTENT_ID, MlFilter.ID.getPreferredName(), + request.filterId, filterId)); + } + return request; + } + + private String filterId; + @Nullable + private String description; + private SortedSet addItems = Collections.emptySortedSet(); + private SortedSet removeItems = Collections.emptySortedSet(); + + public Request() { + } + + public Request(String filterId) { + this.filterId = ExceptionsHelper.requireNonNull(filterId, MlFilter.ID.getPreferredName()); + } + + public String getFilterId() { + return filterId; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public SortedSet getAddItems() { + return addItems; + } + + public void setAddItems(Collection addItems) { + this.addItems = new TreeSet<>(ExceptionsHelper.requireNonNull(addItems, ADD_ITEMS.getPreferredName())); + } + + public SortedSet getRemoveItems() { + 
return removeItems; + } + + public void setRemoveItems(Collection removeItems) { + this.removeItems = new TreeSet<>(ExceptionsHelper.requireNonNull(removeItems, REMOVE_ITEMS.getPreferredName())); + } + + public boolean isNoop() { + return description == null && addItems.isEmpty() && removeItems.isEmpty(); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + filterId = in.readString(); + description = in.readOptionalString(); + addItems = new TreeSet<>(Arrays.asList(in.readStringArray())); + removeItems = new TreeSet<>(Arrays.asList(in.readStringArray())); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(filterId); + out.writeOptionalString(description); + out.writeStringArray(addItems.toArray(new String[addItems.size()])); + out.writeStringArray(removeItems.toArray(new String[removeItems.size()])); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(MlFilter.ID.getPreferredName(), filterId); + if (description != null) { + builder.field(MlFilter.DESCRIPTION.getPreferredName(), description); + } + if (addItems.isEmpty() == false) { + builder.field(ADD_ITEMS.getPreferredName(), addItems); + } + if (removeItems.isEmpty() == false) { + builder.field(REMOVE_ITEMS.getPreferredName(), removeItems); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(filterId, description, addItems, removeItems); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(filterId, other.filterId) + && Objects.equals(description, other.description) + && Objects.equals(addItems, other.addItems) + && Objects.equals(removeItems, other.removeItems); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + public RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new Request()); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java index b11dfd476515c..b45ce73f124fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java @@ -56,7 +56,7 @@ private static ObjectParser createParser(boolean ignoreUnknownFie private final String description; private final SortedSet items; - public MlFilter(String id, String description, SortedSet items) { + private MlFilter(String id, String description, SortedSet items) { this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null"); this.description = description; this.items = Objects.requireNonNull(items, ITEMS.getPreferredName() + " must not be null"); @@ -69,8 +69,7 @@ public MlFilter(StreamInput in) throws IOException { } else { description = null; } - items = new TreeSet<>(); - items.addAll(Arrays.asList(in.readStringArray())); + items = new TreeSet<>(Arrays.asList(in.readStringArray())); } @Override @@ -163,9 +162,13 @@ public Builder setDescription(String description) { return this; } + public Builder 
setItems(SortedSet items) { + this.items = items; + return this; + } + public Builder setItems(List items) { - this.items = new TreeSet<>(); - this.items.addAll(items); + this.items = new TreeSet<>(items); return this; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 79d8f068d91f8..f0329051fed95 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -42,6 +42,8 @@ public final class Messages { public static final String DATAFEED_FREQUENCY_MUST_BE_MULTIPLE_OF_AGGREGATIONS_INTERVAL = "Datafeed frequency [{0}] must be a multiple of the aggregation interval [{1}]"; + public static final String FILTER_NOT_FOUND = "No filter with id [{0}] exists"; + public static final String INCONSISTENT_ID = "Inconsistent {0}; ''{1}'' specified in the body differs from ''{2}'' specified as a URL argument"; public static final String INVALID_ID = "Invalid {0}; ''{1}'' can contain lowercase alphanumeric (a-z and 0-9), hyphens or " + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java index 1588298918e22..03487500d8a8b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java @@ -19,9 +19,9 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -345,7 +345,7 @@ public static String v54DocumentId(String jobId, String snapshotId) { public static ModelSnapshot fromJson(BytesReference bytesReference) { try (InputStream stream = bytesReference.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(bytesReference)) + XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { return LENIENT_PARSER.apply(parser, null).build(); } catch (IOException e) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java index 150c539b1ae3b..d5b83d25ce315 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java @@ -38,6 +38,10 @@ public static ElasticsearchException serverError(String msg, Throwable cause) { return new ElasticsearchException(msg, cause); } + public static ElasticsearchStatusException conflictStatusException(String msg, Throwable cause, Object... 
args) { + return new ElasticsearchStatusException(msg, RestStatus.CONFLICT, cause, args); + } + public static ElasticsearchStatusException conflictStatusException(String msg, Object... args) { return new ElasticsearchStatusException(msg, RestStatus.CONFLICT, args); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java new file mode 100644 index 0000000000000..f07eba7e90ebb --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction.Request; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +public class UpdateFilterActionRequestTests extends AbstractStreamableXContentTestCase { + + private String filterId = randomAlphaOfLength(20); + + @Override + protected Request createTestInstance() { + UpdateFilterAction.Request request = new UpdateFilterAction.Request(filterId); + if (randomBoolean()) { + request.setDescription(randomAlphaOfLength(20)); + } + if (randomBoolean()) { + request.setAddItems(generateRandomStrings()); + } + if (randomBoolean()) { + request.setRemoveItems(generateRandomStrings()); + } + return request; + } + + private static Collection generateRandomStrings() { + int size = randomIntBetween(0, 10); + List strings = new ArrayList<>(size); + for (int i = 0; i < size; ++i) { + strings.add(randomAlphaOfLength(20)); + } + return strings; + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected Request createBlankInstance() { + return new Request(); + } + + @Override + protected Request doParseInstance(XContentParser parser) { + return Request.parseRequest(filterId, parser); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java index 9ac6683f004c5..c8d8527dc0158 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; +import java.util.SortedSet; import java.util.TreeSet; import static org.hamcrest.Matchers.contains; @@ -43,7 +44,7 @@ public static MlFilter createRandom(String filterId) { for (int i = 0; i < size; i++) { items.add(randomAlphaOfLengthBetween(1, 20)); } - return new MlFilter(filterId, description, items); + return MlFilter.builder(filterId).setDescription(description).setItems(items).build(); } @Override @@ -57,13 +58,13 @@ protected MlFilter doParseInstance(XContentParser parser) { } public void testNullId() { - NullPointerException ex = expectThrows(NullPointerException.class, () -> new MlFilter(null, "", new TreeSet<>())); + 
NullPointerException ex = expectThrows(NullPointerException.class, () -> MlFilter.builder(null).build()); assertEquals(MlFilter.ID.getPreferredName() + " must not be null", ex.getMessage()); } public void testNullItems() { - NullPointerException ex = - expectThrows(NullPointerException.class, () -> new MlFilter(randomAlphaOfLengthBetween(1, 20), "", null)); + NullPointerException ex = expectThrows(NullPointerException.class, + () -> MlFilter.builder(randomAlphaOfLength(20)).setItems((SortedSet) null).build()); assertEquals(MlFilter.ITEMS.getPreferredName() + " must not be null", ex.getMessage()); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index a1714a8e3f5db..3d1011c47e2a8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -97,6 +97,7 @@ import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateDatafeedAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.UpdateProcessAction; @@ -148,6 +149,7 @@ import org.elasticsearch.xpack.ml.action.TransportStopDatafeedAction; import org.elasticsearch.xpack.ml.action.TransportUpdateCalendarJobAction; import org.elasticsearch.xpack.ml.action.TransportUpdateDatafeedAction; +import org.elasticsearch.xpack.ml.action.TransportUpdateFilterAction; import org.elasticsearch.xpack.ml.action.TransportUpdateJobAction; import org.elasticsearch.xpack.ml.action.TransportUpdateModelSnapshotAction; import org.elasticsearch.xpack.ml.action.TransportUpdateProcessAction; @@ -196,6 +198,7 @@ import org.elasticsearch.xpack.ml.rest.filter.RestDeleteFilterAction; import org.elasticsearch.xpack.ml.rest.filter.RestGetFiltersAction; import org.elasticsearch.xpack.ml.rest.filter.RestPutFilterAction; +import org.elasticsearch.xpack.ml.rest.filter.RestUpdateFilterAction; import org.elasticsearch.xpack.ml.rest.job.RestCloseJobAction; import org.elasticsearch.xpack.ml.rest.job.RestDeleteJobAction; import org.elasticsearch.xpack.ml.rest.job.RestFlushJobAction; @@ -460,6 +463,7 @@ public List getRestHandlers(Settings settings, RestController restC new RestOpenJobAction(settings, restController), new RestGetFiltersAction(settings, restController), new RestPutFilterAction(settings, restController), + new RestUpdateFilterAction(settings, restController), new RestDeleteFilterAction(settings, restController), new RestGetInfluencersAction(settings, restController), new RestGetRecordsAction(settings, restController), @@ -511,6 +515,7 @@ public List getRestHandlers(Settings settings, RestController restC new ActionHandler<>(OpenJobAction.INSTANCE, TransportOpenJobAction.class), new ActionHandler<>(GetFiltersAction.INSTANCE, TransportGetFiltersAction.class), new ActionHandler<>(PutFilterAction.INSTANCE, TransportPutFilterAction.class), + new ActionHandler<>(UpdateFilterAction.INSTANCE, TransportUpdateFilterAction.class), new ActionHandler<>(DeleteFilterAction.INSTANCE, TransportDeleteFilterAction.class), new ActionHandler<>(KillProcessAction.INSTANCE, TransportKillProcessAction.class), new 
ActionHandler<>(GetBucketsAction.INSTANCE, TransportGetBucketsAction.class), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index 1be7be4a5d2b3..c8cd7a0d63bb7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -21,8 +21,8 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -80,9 +80,8 @@ public void onResponse(GetResponse getDocResponse) { if (getDocResponse.isExists()) { BytesReference docSource = getDocResponse.getSourceAsBytesRef(); try (InputStream stream = docSource.streamInput(); - XContentParser parser = - XContentFactory.xContent(getDocResponse.getSourceAsBytes()) - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { MlFilter filter = MlFilter.LENIENT_PARSER.apply(parser, null).build(); responseBody = new QueryPage<>(Collections.singletonList(filter), 1, MlFilter.RESULTS_FIELD); @@ -122,7 +121,7 @@ public void onResponse(SearchResponse response) { for (SearchHit hit : response.getHits().getHits()) { BytesReference docSource = hit.getSourceRef(); try (InputStream stream = docSource.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(docSource)).createParser( + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser( NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { docs.add(MlFilter.LENIENT_PARSER.apply(parser, null).build()); } catch (IOException e) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index cb35daef8668c..011606f3c14ed 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -5,11 +5,12 @@ */ package org.elasticsearch.xpack.ml.action; +import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; @@ -19,12 +20,12 @@ import 
org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.job.JobManager; import java.io.IOException; import java.util.Collections; @@ -36,42 +37,44 @@ public class TransportPutFilterAction extends HandledTransportAction { private final Client client; - private final JobManager jobManager; @Inject - public TransportPutFilterAction(Settings settings, TransportService transportService, ActionFilters actionFilters, - Client client, JobManager jobManager) { + public TransportPutFilterAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Client client) { super(settings, PutFilterAction.NAME, transportService, actionFilters, - (Supplier) PutFilterAction.Request::new); + (Supplier) PutFilterAction.Request::new); this.client = client; - this.jobManager = jobManager; } @Override protected void doExecute(PutFilterAction.Request request, ActionListener listener) { MlFilter filter = request.getFilter(); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); + indexRequest.opType(DocWriteRequest.OpType.CREATE); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")); indexRequest.source(filter.toXContent(builder, params)); } catch (IOException e) { throw new IllegalStateException("Failed to serialise filter with id [" + filter.getId() + "]", e); } - BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); - bulkRequestBuilder.add(indexRequest); - bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), - new ActionListener() { + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, + new ActionListener() { @Override - public void onResponse(BulkResponse indexResponse) { - jobManager.updateProcessOnFilterChanged(filter); + public void onResponse(IndexResponse indexResponse) { listener.onResponse(new PutFilterAction.Response(filter)); } @Override public void onFailure(Exception e) { - listener.onFailure(ExceptionsHelper.serverError("Error putting filter with id [" + filter.getId() + "]", e)); + Exception reportedException; + if (e instanceof VersionConflictEngineException) { + reportedException = new ResourceAlreadyExistsException("A filter with id [" + filter.getId() + + "] already exists"); + } else { + reportedException = ExceptionsHelper.serverError("Error putting filter with id [" + filter.getId() + "]", e); + } + listener.onFailure(reportedException); } }); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java new file mode 100644 index 0000000000000..37f550fbb02ea --- /dev/null +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java @@ -0,0 +1,173 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.action.PutFilterAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.job.JobManager; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Collections; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +public class TransportUpdateFilterAction extends HandledTransportAction { + + private final Client client; + private final JobManager jobManager; + + @Inject + public TransportUpdateFilterAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Client client, + JobManager jobManager) { + super(settings, UpdateFilterAction.NAME, transportService, actionFilters, + (Supplier) UpdateFilterAction.Request::new); + this.client = client; + this.jobManager = jobManager; + } + + @Override + protected void doExecute(UpdateFilterAction.Request request, ActionListener listener) { + ActionListener filterListener = ActionListener.wrap(filterWithVersion -> { + updateFilter(filterWithVersion, request, listener); + }, listener::onFailure); + + getFilterWithVersion(request.getFilterId(), filterListener); + } + + private void updateFilter(FilterWithVersion filterWithVersion, UpdateFilterAction.Request request, + ActionListener listener) { + MlFilter filter = filterWithVersion.filter; + + if (request.isNoop()) { + 
listener.onResponse(new PutFilterAction.Response(filter)); + return; + } + + String description = request.getDescription() == null ? filter.getDescription() : request.getDescription(); + SortedSet items = new TreeSet<>(filter.getItems()); + items.addAll(request.getAddItems()); + + // Check if removed items are present to avoid typos + for (String toRemove : request.getRemoveItems()) { + boolean wasPresent = items.remove(toRemove); + if (wasPresent == false) { + listener.onFailure(ExceptionsHelper.badRequestException("Cannot remove item [" + toRemove + + "] as it is not present in filter [" + filter.getId() + "]")); + return; + } + } + + MlFilter updatedFilter = MlFilter.builder(filter.getId()).setDescription(description).setItems(items).build(); + indexUpdatedFilter(updatedFilter, filterWithVersion.version, request, listener); + } + + private void indexUpdatedFilter(MlFilter filter, long version, UpdateFilterAction.Request request, + ActionListener listener) { + IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); + indexRequest.version(version); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")); + indexRequest.source(filter.toXContent(builder, params)); + } catch (IOException e) { + throw new IllegalStateException("Failed to serialise filter with id [" + filter.getId() + "]", e); + } + + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + jobManager.notifyFilterChanged(filter, request.getAddItems(), request.getRemoveItems()); + listener.onResponse(new PutFilterAction.Response(filter)); + } + + @Override + public void onFailure(Exception e) { + Exception reportedException; + if (e instanceof VersionConflictEngineException) { + reportedException = ExceptionsHelper.conflictStatusException("Error updating filter with id [" + filter.getId() + + "] because it was modified while the update was in progress", e); + } else { + reportedException = ExceptionsHelper.serverError("Error updating filter with id [" + filter.getId() + "]", e); + } + listener.onFailure(reportedException); + } + }); + } + + private void getFilterWithVersion(String filterId, ActionListener listener) { + GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId)); + executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener() { + @Override + public void onResponse(GetResponse getDocResponse) { + try { + if (getDocResponse.isExists()) { + BytesReference docSource = getDocResponse.getSourceAsBytesRef(); + try (InputStream stream = docSource.streamInput(); + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + MlFilter filter = MlFilter.LENIENT_PARSER.apply(parser, null).build(); + listener.onResponse(new FilterWithVersion(filter, getDocResponse.getVersion())); + } + } else { + this.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.FILTER_NOT_FOUND, filterId))); + } + } catch (Exception e) { + this.onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private static class 
FilterWithVersion { + + private final MlFilter filter; + private final long version; + + private FilterWithVersion(MlFilter filter, long version) { + this.filter = filter; + this.version = version; + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index fe6deea55e3aa..c3d31ae10e925 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -403,26 +403,55 @@ private ClusterState updateClusterState(Job job, boolean overwrite, ClusterState return buildNewClusterState(currentState, builder); } - public void updateProcessOnFilterChanged(MlFilter filter) { + public void notifyFilterChanged(MlFilter filter, Set addedItems, Set removedItems) { + if (addedItems.isEmpty() && removedItems.isEmpty()) { + return; + } + ClusterState clusterState = clusterService.state(); QueryPage jobs = expandJobs("*", true, clusterService.state()); for (Job job : jobs.results()) { - if (isJobOpen(clusterState, job.getId())) { - Set jobFilters = job.getAnalysisConfig().extractReferencedFilters(); - if (jobFilters.contains(filter.getId())) { - updateJobProcessNotifier.submitJobUpdate(UpdateParams.filterUpdate(job.getId(), filter), ActionListener.wrap( - isUpdated -> { - if (isUpdated) { - auditor.info(job.getId(), - Messages.getMessage(Messages.JOB_AUDIT_FILTER_UPDATED_ON_PROCESS, filter.getId())); - } - }, e -> {} - )); + Set jobFilters = job.getAnalysisConfig().extractReferencedFilters(); + if (jobFilters.contains(filter.getId())) { + if (isJobOpen(clusterState, job.getId())) { + updateJobProcessNotifier.submitJobUpdate(UpdateParams.filterUpdate(job.getId(), filter), + ActionListener.wrap(isUpdated -> { + auditFilterChanges(job.getId(), filter.getId(), addedItems, removedItems); + }, e -> {})); + } else { + auditFilterChanges(job.getId(), filter.getId(), addedItems, removedItems); } } } } + private void auditFilterChanges(String jobId, String filterId, Set addedItems, Set removedItems) { + StringBuilder auditMsg = new StringBuilder("Filter ["); + auditMsg.append(filterId); + auditMsg.append("] has been modified; "); + + if (addedItems.isEmpty() == false) { + auditMsg.append("added items: "); + appendCommaSeparatedSet(addedItems, auditMsg); + if (removedItems.isEmpty() == false) { + auditMsg.append(", "); + } + } + + if (removedItems.isEmpty() == false) { + auditMsg.append("removed items: "); + appendCommaSeparatedSet(removedItems, auditMsg); + } + + auditor.info(jobId, auditMsg.toString()); + } + + private static void appendCommaSeparatedSet(Set items, StringBuilder sb) { + sb.append("["); + Strings.collectionToDelimitedString(items, ", ", "'", "'", sb); + sb.append("]"); + } + public void updateProcessOnCalendarChanged(List calendarJobIds) { ClusterState clusterState = clusterService.state(); MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterState); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java index 17b4b8edadfa2..53526e2a4753d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java @@ -11,8 +11,8 @@ import 
org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.Result; @@ -30,7 +30,7 @@ class BatchedBucketsIterator extends BatchedResultsIterator { protected Result map(SearchHit hit) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY, + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { Bucket bucket = Bucket.LENIENT_PARSER.apply(parser, null); return new Result<>(hit.getIndex(), bucket); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java index d084325350fc5..fe8bd3aaa3af7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.Result; @@ -29,7 +29,7 @@ class BatchedInfluencersIterator extends BatchedResultsIterator { protected Result map(SearchHit hit) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY, + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { Influencer influencer = Influencer.LENIENT_PARSER.apply(parser, null); return new Result<>(hit.getIndex(), influencer); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java index c0940dfd5aad1..22c107f771ba5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import 
org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Result; @@ -30,7 +30,7 @@ class BatchedRecordsIterator extends BatchedResultsIterator { protected Result map(SearchHit hit) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY, + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)){ AnomalyRecord record = AnomalyRecord.LENIENT_PARSER.apply(parser, null); return new Result<>(hit.getIndex(), record); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java index 9db1877df1850..578ddd1efc78a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java @@ -50,7 +50,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; @@ -477,7 +476,7 @@ private T parseSearchHit(SearchHit hit, BiFunction Consumer errorHandler) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)) + XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { return objectParser.apply(parser, null); } catch (IOException e) { @@ -528,7 +527,7 @@ public void buckets(String jobId, BucketsQueryBuilder query, Consumer modelPlot(String jobId, int from, int size) { for (SearchHit hit : searchResponse.getHits().getHits()) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)) + XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { ModelPlot modelPlot = ModelPlot.LENIENT_PARSER.apply(parser, null); results.add(modelPlot); @@ -1232,10 +1231,8 @@ public void onResponse(GetResponse getDocResponse) { BytesReference docSource = getDocResponse.getSourceAsBytesRef(); try (InputStream stream = docSource.streamInput(); - XContentParser parser = - XContentFactory.xContent(XContentHelper.xContentType(docSource)) - .createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, stream)) { + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { Calendar calendar = Calendar.LENIENT_PARSER.apply(parser, null).build(); listener.onResponse(calendar); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java new file mode 100644 index 0000000000000..80acf3d7e4e35 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.rest.filter; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.ml.MachineLearning; + +import java.io.IOException; + +public class RestUpdateFilterAction extends BaseRestHandler { + + public RestUpdateFilterAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.POST, + MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}/_update", this); + } + + @Override + public String getName() { + return "xpack_ml_update_filter_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String filterId = restRequest.param(MlFilter.ID.getPreferredName()); + XContentParser parser = restRequest.contentOrSourceParamParser(); + UpdateFilterAction.Request putFilterRequest = UpdateFilterAction.Request.parseRequest(filterId, parser); + return channel -> client.execute(UpdateFilterAction.INSTANCE, putFilterRequest, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java index 42b0a56f49a82..cf925963c198a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java @@ -41,12 +41,14 @@ import org.junit.Before; import org.mockito.ArgumentCaptor; import org.mockito.Matchers; +import org.mockito.Mockito; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; +import java.util.TreeSet; import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; import static org.elasticsearch.xpack.ml.action.TransportOpenJobActionTests.addJobTask; @@ -174,7 +176,16 @@ public void onFailure(Exception e) { }); } - public void testUpdateProcessOnFilterChanged() { + public void testNotifyFilterChangedGivenNoop() { + MlFilter filter = MlFilter.builder("my_filter").build(); + JobManager jobManager = createJobManager(); + + jobManager.notifyFilterChanged(filter, Collections.emptySet(), Collections.emptySet()); + + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); + } + + public void testNotifyFilterChanged() { Detector.Builder 
detectorReferencingFilter = new Detector.Builder("count", null); detectorReferencingFilter.setByFieldName("foo"); DetectionRule filterRule = new DetectionRule.Builder(RuleScope.builder().exclude("foo", "foo_filter")).build(); @@ -208,11 +219,18 @@ public void testUpdateProcessOnFilterChanged() { .build(); when(clusterService.state()).thenReturn(clusterState); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + listener.onResponse(true); + return null; + }).when(updateJobProcessNotifier).submitJobUpdate(any(), any()); + JobManager jobManager = createJobManager(); MlFilter filter = MlFilter.builder("foo_filter").setItems("a", "b").build(); - jobManager.updateProcessOnFilterChanged(filter); + jobManager.notifyFilterChanged(filter, new TreeSet<>(Arrays.asList("item 1", "item 2")), + new TreeSet<>(Collections.singletonList("item 3"))); ArgumentCaptor updateParamsCaptor = ArgumentCaptor.forClass(UpdateParams.class); verify(updateJobProcessNotifier, times(2)).submitJobUpdate(updateParamsCaptor.capture(), any(ActionListener.class)); @@ -223,6 +241,74 @@ public void testUpdateProcessOnFilterChanged() { assertThat(capturedUpdateParams.get(0).getFilter(), equalTo(filter)); assertThat(capturedUpdateParams.get(1).getJobId(), equalTo(jobReferencingFilter2.getId())); assertThat(capturedUpdateParams.get(1).getFilter(), equalTo(filter)); + + verify(auditor).info(jobReferencingFilter1.getId(), "Filter [foo_filter] has been modified; added items: " + + "['item 1', 'item 2'], removed items: ['item 3']"); + verify(auditor).info(jobReferencingFilter2.getId(), "Filter [foo_filter] has been modified; added items: " + + "['item 1', 'item 2'], removed items: ['item 3']"); + verify(auditor).info(jobReferencingFilter3.getId(), "Filter [foo_filter] has been modified; added items: " + + "['item 1', 'item 2'], removed items: ['item 3']"); + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); + } + + public void testNotifyFilterChangedGivenOnlyAddedItems() { + Detector.Builder detectorReferencingFilter = new Detector.Builder("count", null); + detectorReferencingFilter.setByFieldName("foo"); + DetectionRule filterRule = new DetectionRule.Builder(RuleScope.builder().exclude("foo", "foo_filter")).build(); + detectorReferencingFilter.setRules(Collections.singletonList(filterRule)); + AnalysisConfig.Builder filterAnalysisConfig = new AnalysisConfig.Builder(Collections.singletonList( + detectorReferencingFilter.build())); + + Job.Builder jobReferencingFilter = buildJobBuilder("job-referencing-filter"); + jobReferencingFilter.setAnalysisConfig(filterAnalysisConfig); + + MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); + mlMetadata.putJob(jobReferencingFilter.build(), false); + + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder() + .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .build(); + when(clusterService.state()).thenReturn(clusterState); + + JobManager jobManager = createJobManager(); + + MlFilter filter = MlFilter.builder("foo_filter").build(); + + jobManager.notifyFilterChanged(filter, new TreeSet<>(Arrays.asList("a", "b")), Collections.emptySet()); + + verify(auditor).info(jobReferencingFilter.getId(), "Filter [foo_filter] has been modified; added items: ['a', 'b']"); + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); + } + + public void testNotifyFilterChangedGivenOnlyRemovedItems() { + Detector.Builder detectorReferencingFilter = new 
Detector.Builder("count", null); + detectorReferencingFilter.setByFieldName("foo"); + DetectionRule filterRule = new DetectionRule.Builder(RuleScope.builder().exclude("foo", "foo_filter")).build(); + detectorReferencingFilter.setRules(Collections.singletonList(filterRule)); + AnalysisConfig.Builder filterAnalysisConfig = new AnalysisConfig.Builder(Collections.singletonList( + detectorReferencingFilter.build())); + + Job.Builder jobReferencingFilter = buildJobBuilder("job-referencing-filter"); + jobReferencingFilter.setAnalysisConfig(filterAnalysisConfig); + + MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); + mlMetadata.putJob(jobReferencingFilter.build(), false); + + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder() + .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .build(); + when(clusterService.state()).thenReturn(clusterState); + + JobManager jobManager = createJobManager(); + + MlFilter filter = MlFilter.builder("foo_filter").build(); + + jobManager.notifyFilterChanged(filter, Collections.emptySet(), new TreeSet<>(Arrays.asList("a", "b"))); + + verify(auditor).info(jobReferencingFilter.getId(), "Filter [foo_filter] has been modified; removed items: ['a', 'b']"); + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); } public void testUpdateProcessOnCalendarChanged() { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json new file mode 100644 index 0000000000000..06aceea4c1240 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json @@ -0,0 +1,20 @@ +{ + "xpack.ml.update_filter": { + "methods": [ "POST" ], + "url": { + "path": "/_xpack/ml/filters/{filter_id}/_update", + "paths": [ "/_xpack/ml/filters/{filter_id}/_update" ], + "parts": { + "filter_id": { + "type": "string", + "required": true, + "description": "The ID of the filter to update" + } + } + }, + "body": { + "description" : "The filter update", + "required" : true + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml index ffbbf4d95bdda..c206a08e6ca91 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml @@ -30,6 +30,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-custom-all-test-1 type: doc @@ -56,6 +57,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-custom-all-test-2 type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml index 1a587c47fd573..c13b2473cc785 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml @@ -34,6 +34,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-delete-model-snapshot type: doc @@ -76,6 +77,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-delete-model-snapshot type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml index 4c184d34c995e..d787e07b8c28c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml @@ -4,6 +4,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-meta type: doc @@ -112,25 +113,25 @@ setup: "Test create filter api": - do: xpack.ml.put_filter: - filter_id: filter-foo2 + filter_id: new-filter body: > { "description": "A newly created filter", "items": ["abc", "xyz"] } - - match: { filter_id: filter-foo2 } + - match: { filter_id: new-filter } - match: { description: "A newly created filter" } - match: { items: ["abc", "xyz"]} - do: xpack.ml.get_filters: - filter_id: "filter-foo2" + filter_id: "new-filter" - match: { count: 1 } - match: filters.0: - filter_id: "filter-foo2" + filter_id: "new-filter" description: "A newly created filter" items: ["abc", "xyz"] @@ -146,6 +147,65 @@ setup: "items": ["abc", "xyz"] } +--- +"Test update filter given no filter matches filter_id": + - do: + catch: missing + xpack.ml.update_filter: + filter_id: "missing_filter" + body: > + { + } + +--- +"Test update filter": + - do: + xpack.ml.put_filter: + filter_id: "test_update_filter" + body: > + { + "description": "old description", + "items": ["a", "b"] + } + - match: { filter_id: test_update_filter } + + - do: + xpack.ml.update_filter: + filter_id: "test_update_filter" + body: > + { + "description": "new description", + "add_items": ["c", "d"], + "remove_items": ["a"] + } + - match: { filter_id: test_update_filter } + - match: { description: "new description" } + - match: { items: ["b", "c", "d"] } + + - do: + xpack.ml.get_filters: + filter_id: "test_update_filter" + - match: + filters.0: + filter_id: "test_update_filter" + description: "new description" + items: ["b", "c", "d"] + + - do: + xpack.ml.delete_filter: + filter_id: test_update_filter + +--- +"Test update filter given remove item is not present": + - do: + catch: /Cannot remove item \[not present item\] as it is not present in filter \[filter-foo\]/ + xpack.ml.update_filter: + filter_id: "filter-foo" + body: > + { + "remove_items": ["not present item"] + } + --- "Test delete in-use filter": - do: diff --git 
a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml index 57cc80ae2fb73..e411251363b71 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-model-snapshots type: doc @@ -33,6 +34,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-state type: doc @@ -44,6 +46,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-model-snapshots type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml index c13ae86e06f50..6a60bbb96da6f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml @@ -556,6 +556,8 @@ - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json + index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml index df505176ae739..3b08753e20913 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml @@ -419,6 +419,8 @@ - match: { job_id: "jobs-crud-model-memory-limit-decrease" } - do: + headers: + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -929,6 +931,8 @@ "Test cannot create job with existing result document": - do: + headers: + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml index 2a7a7970e5db2..125f8cbf7f8d2 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-buckets type: doc @@ -34,6 +35,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-buckets type: doc @@ -50,6 +52,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-buckets type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml index 565f1612f89a2..307a1d0a80d7e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-categories type: doc @@ -26,6 +27,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-categories type: doc @@ -34,6 +36,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-unrelated type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml index 50f0cfc6816bc..9b875fb1afd86 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-influencers-test type: doc @@ -36,6 +37,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-influencers-test type: doc @@ -55,6 +57,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-influencers-test type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml index 75f35f311177c..249ff7c72d7ad 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml @@ -59,6 +59,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -75,6 +76,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -91,6 +93,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -123,6 +126,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -139,6 +143,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -155,6 +160,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -171,6 +177,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -187,6 +194,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -203,6 +211,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml index b5dae2045f440..513e1fb875774 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-records type: doc @@ -34,6 +35,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-records type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml index 61bcf63e39869..b841c8c23069f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml @@ -226,6 +226,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -250,6 +251,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml index 42fca7b81a036..0f01613203704 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml @@ -19,6 +19,7 @@ - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml index a66c0da12d0a9..ce638fdceaa19 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml @@ -34,6 +34,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -61,6 +62,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -88,6 +90,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -103,6 +106,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -118,6 +122,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -133,6 +138,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -148,6 +154,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -163,6 +170,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -180,6 +188,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml index 6a1d6e117e924..9966ae668c08f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-update-model-snapshot type: doc @@ -67,6 +68,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-update-model-snapshot type: doc diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java index fbda8ad716b2c..7f018f967fbfd 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.DetectionRule; @@ -34,6 +35,7 @@ import java.util.Map; import java.util.Set; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isOneOf; @@ -177,10 +179,12 @@ public void testScope() throws Exception { assertThat(records.get(0).getOverFieldValue(), equalTo("333.333.333.333")); // Now let's update the filter - MlFilter updatedFilter = MlFilter.builder(safeIps.getId()).setItems("333.333.333.333").build(); - assertThat(putMlFilter(updatedFilter).getFilter(), equalTo(updatedFilter)); + UpdateFilterAction.Request updateFilterRequest = new UpdateFilterAction.Request(safeIps.getId()); + updateFilterRequest.setRemoveItems(safeIps.getItems()); + updateFilterRequest.setAddItems(Collections.singletonList("333.333.333.333")); + client().execute(UpdateFilterAction.INSTANCE, updateFilterRequest).get(); - // Wait until the notification that the process was updated is indexed + // Wait until the notification that the filter was updated is indexed assertBusy(() -> { SearchResponse searchResponse = client().prepareSearch(".ml-notifications") .setSize(1) @@ -191,7 +195,7 @@ public void testScope() throws Exception { ).get(); SearchHit[] hits = searchResponse.getHits().getHits(); assertThat(hits.length, equalTo(1)); - 
assertThat(hits[0].getSourceAsMap().get("message"), equalTo("Updated filter [safe_ips] in running process")); + assertThat((String) hits[0].getSourceAsMap().get("message"), containsString("Filter [safe_ips] has been modified")); }); long secondAnomalyTime = timestamp; diff --git a/x-pack/qa/smoke-test-ml-with-security/build.gradle b/x-pack/qa/smoke-test-ml-with-security/build.gradle index ebe55c2b7ef29..58e5eca3600f6 100644 --- a/x-pack/qa/smoke-test-ml-with-security/build.gradle +++ b/x-pack/qa/smoke-test-ml-with-security/build.gradle @@ -42,6 +42,7 @@ integTestRunner { 'ml/filter_crud/Test get filter API with bad ID', 'ml/filter_crud/Test invalid param combinations', 'ml/filter_crud/Test non-existing filter', + 'ml/filter_crud/Test update filter given remove item is not present', 'ml/get_datafeed_stats/Test get datafeed stats given missing datafeed_id', 'ml/get_datafeeds/Test get datafeed given missing datafeed_id', 'ml/jobs_crud/Test cannot create job with existing categorizer state document', From f023e95ae0ce4828085631b342d518c0e572a8aa Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 Jun 2018 16:17:17 +0200 Subject: [PATCH 75/92] Upgrade to Lucene 7.4.0. (#31529) This moves Elasticsearch from a recent 7.4.0 snapshot to the GA release. --- buildSrc/version.properties | 2 +- .../lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 | 1 + .../lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 | 1 + ...lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 | 1 + .../lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 | 1 + ...lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 | 1 + .../lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 | 1 + .../lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 | 1 + ...cene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 | 1 + .../lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 | 1 + .../lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 | 1 + server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-core-7.4.0.jar.sha1 | 1 + .../licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-grouping-7.4.0.jar.sha1 | 1 + .../lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-highlighter-7.4.0.jar.sha1 | 1 + server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-join-7.4.0.jar.sha1 | 1 + .../licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-memory-7.4.0.jar.sha1 | 1 + server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-misc-7.4.0.jar.sha1 | 1 + .../licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-queries-7.4.0.jar.sha1 | 1 + .../lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 | 1 - 
server/licenses/lucene-queryparser-7.4.0.jar.sha1 | 1 + .../licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-sandbox-7.4.0.jar.sha1 | 1 + .../licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-spatial-7.4.0.jar.sha1 | 1 + .../lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 | 1 + .../lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-spatial3d-7.4.0.jar.sha1 | 1 + .../licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-suggest-7.4.0.jar.sha1 | 1 + .../licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 | 1 - x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 | 1 + 49 files changed, 25 insertions(+), 25 deletions(-) delete mode 100644 modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-core-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-grouping-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-join-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-memory-7.4.0.jar.sha1 
delete mode 100644 server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-misc-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-queries-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-spatial-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-suggest-7.4.0.jar.sha1 delete mode 100644 x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index d89ffa78ed852..17e5cb5ff01f5 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0-alpha1 -lucene = 7.4.0-snapshot-518d303506 +lucene = 7.4.0 # optional dependencies spatial4j = 0.7 diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 2e666a2d566b0..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a57659a275921d8ab3f7ec580e9bf713ce6143b1 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..2b14a61f264fa --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 @@ -0,0 +1 @@ +9f0a326f7ec1671ffb07f95b27f1a5812b7dc1c3 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 03f1b7d27aed5..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b91a260d8d12ee4b3302a63059c73a34de0ce146 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..b5291b30c7de8 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 @@ -0,0 +1 @@ +394e811e9d9bf0b9fba837f7ceca9e8f3e39d1c2 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 
b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 9a5c6669009eb..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cc1ca9bd9e2c162dd1da8c2e7111913fd8033e48 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..49f55bea5e687 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 @@ -0,0 +1 @@ +5cd56acfa16ba20e19b5d21d90b510eada841431 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index cbf4f78c31999..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2fa3662a10a9e085b1c7b87293d727422cbe6224 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..c4b61b763b483 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 @@ -0,0 +1 @@ +db7b56f4cf533ad9022d2312c5ee48331edccca3 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index bd5bf428b6d44..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -60aa50c11857e6739e68936cb45102562b2c46b4 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..779cac9761242 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 @@ -0,0 +1 @@ +e8dba4d28a595eab2e8fb6095d1ac5f2d3872144 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index a73900802ace1..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4586368007785a3be26db4b9ce404ffb8c76f350 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..cf5c49a2759c9 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 @@ -0,0 +1 @@ +1243c771ee824c46a3d66ae3e4256d919fc06fbe \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 
bf0a50f7154e5..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9c6d030ab2c148df7a6ba73a774ef4b8c720a6cb \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..830b9ccf9cbe2 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 @@ -0,0 +1 @@ +c783794b0d20d8dc1285edc7701f386b1f0e2fb8 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index ba6ceb2aed9d8..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8275bf8df2644d5fcec2963cf237d14b6e00fefe \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..a96e05f5e3b87 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 @@ -0,0 +1 @@ +9438efa504a89afb6cb4c66448c257f865164d23 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 4c0db7a735c8d..0000000000000 --- a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -557d62d2b13d3dcb1810a1633e22625e42425425 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..928cc6dea046c --- /dev/null +++ b/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 @@ -0,0 +1 @@ +e1afb580df500626a1c695e0fc9a7e8a8f58bcac \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 0579316096a72..0000000000000 --- a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3755ad4c98b49fe5055b32358e3071727177c03 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..a94663119e7d6 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 @@ -0,0 +1 @@ +a6ad941ef1fdad48673ed511631b7e48a9456bf7 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 134072bc13701..0000000000000 --- a/server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c1bbf611535f0b0fd0ba14e8da67c8d645b95244 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0.jar.sha1 b/server/licenses/lucene-core-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..80ba6c76aa301 --- /dev/null +++ 
b/server/licenses/lucene-core-7.4.0.jar.sha1 @@ -0,0 +1 @@ +730d9ac80436c8cbc0b2a8a749259be536b97316 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 8a3327cc8a227..0000000000000 --- a/server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b62ebd53bbefb2f59cd246157a6768cae8a5a3a1 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0.jar.sha1 b/server/licenses/lucene-grouping-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..5b781d26829ed --- /dev/null +++ b/server/licenses/lucene-grouping-7.4.0.jar.sha1 @@ -0,0 +1 @@ +56f99858a4421a517b52da36a222debcccab80c6 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 75fb5a7755639..0000000000000 --- a/server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cba0fd4ccb98db8a72287a95d6b653e455f9eeb3 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..e1ebb95fe1b05 --- /dev/null +++ b/server/licenses/lucene-highlighter-7.4.0.jar.sha1 @@ -0,0 +1 @@ +5266b45d7f049662817d739881765904621876d0 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 01e0197bc1713..0000000000000 --- a/server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5127ed0b7516f8b28d84e837df4f33c67e361f6c \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0.jar.sha1 b/server/licenses/lucene-join-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..ff81c33c3f860 --- /dev/null +++ b/server/licenses/lucene-join-7.4.0.jar.sha1 @@ -0,0 +1 @@ +c77154d18c4944ceb6ce0741060632f57d623fdc \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 3d6069f2a5c8b..0000000000000 --- a/server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -45c7b13aae1104f9f5f0fca0606e5741309c8d74 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0.jar.sha1 b/server/licenses/lucene-memory-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..7c0117dff6b68 --- /dev/null +++ b/server/licenses/lucene-memory-7.4.0.jar.sha1 @@ -0,0 +1 @@ +186ff981feec1bdbf1a6236e786ec171b5fbe3e0 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index a74be59aea39c..0000000000000 --- a/server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2540c4b5d9dca8a39a3b4d58efe4ab484df7254f \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0.jar.sha1 b/server/licenses/lucene-misc-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..5cdf6810fa57c --- /dev/null +++ b/server/licenses/lucene-misc-7.4.0.jar.sha1 @@ -0,0 +1 @@ +bf844bb6f6d84da19e8c79ce5fbb4cf6d00f2611 \ No newline at 
end of file diff --git a/server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index cf26412b63f80..0000000000000 --- a/server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e9d0c0c020917d4bf9b590526866ff5547dbaa17 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0.jar.sha1 b/server/licenses/lucene-queries-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..198890379374f --- /dev/null +++ b/server/licenses/lucene-queries-7.4.0.jar.sha1 @@ -0,0 +1 @@ +229a50e6d9d4db076f671c230d493000c6e2972c \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 63533b774673f..0000000000000 --- a/server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -50969cdb7279047fbec94dda6e7d74d1c73c07f8 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..afdc275afe2b3 --- /dev/null +++ b/server/licenses/lucene-queryparser-7.4.0.jar.sha1 @@ -0,0 +1 @@ +8e58add0d0c39df97d07c8e343041989bf4b3a3f \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 4eab31d62bd41..0000000000000 --- a/server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -94524b293572b1f0d01a0faeeade1ff24713f966 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..81ae3bddd0709 --- /dev/null +++ b/server/licenses/lucene-sandbox-7.4.0.jar.sha1 @@ -0,0 +1 @@ +1692604fa06a945d1ee19939022ef1a912235db3 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index ae5a2ea0375fd..0000000000000 --- a/server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -878db723e41ece636ed338c4ef374e900f221a14 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0.jar.sha1 b/server/licenses/lucene-spatial-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..cc3f31340b9a2 --- /dev/null +++ b/server/licenses/lucene-spatial-7.4.0.jar.sha1 @@ -0,0 +1 @@ +847d2f897961124e2fc7d5e55d8309635bb026bc \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 9f5129d89056a..0000000000000 --- a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c8dc85c32aeac6ff320aa6a9ea57881ad4847a55 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..3f05790e430f5 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 @@ -0,0 +1 @@ +586892eefc0546643d7f5d7f83659c7db0d534ff \ No newline at end of file diff --git 
a/server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 02fcef681fc30..0000000000000 --- a/server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -203d8d22ab172e624784a5fdeaecdd01ae25fb3d \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..8c767b16c538b --- /dev/null +++ b/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 @@ -0,0 +1 @@ +32cd2854f39ff453a5d128ce40e11eea4168abbf \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index a7daa7ff02a38..0000000000000 --- a/server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4d6cf8fa1064a86991d5cd12a2ed32119ac91212 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0.jar.sha1 b/server/licenses/lucene-suggest-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..59d59cf79413a --- /dev/null +++ b/server/licenses/lucene-suggest-7.4.0.jar.sha1 @@ -0,0 +1 @@ +0cdc1a512032f8b23dd4b1add0f5cd06325addc3 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 134072bc13701..0000000000000 --- a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c1bbf611535f0b0fd0ba14e8da67c8d645b95244 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..80ba6c76aa301 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 @@ -0,0 +1 @@ +730d9ac80436c8cbc0b2a8a749259be536b97316 \ No newline at end of file From 59e7c6411a04f08a325f02d612e12eab12b22316 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 22 Jun 2018 07:36:03 -0700 Subject: [PATCH 76/92] Core: Combine messageRecieved methods in TransportRequestHandler (#31519) TransportRequestHandler currently contains 2 messageReceived methods, one which takes a Task, and one that does not. The first just delegates to the second. This commit changes all existing implementors of TransportRequestHandler to implement the version which takes Task, thus allowing the class to be a functional interface, and eliminating the need to throw exceptions when a task needs to be ensured. 
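For illustration, a minimal handler registration under the combined interface might look like the sketch below. This is not taken from the patch: the ExampleHandlerRegistration class and the "internal:example/ping" action name are invented for this example, while the registerRequestHandler signature and the Empty request/response types mirror the test code touched by this commit.

    import org.elasticsearch.tasks.Task;
    import org.elasticsearch.threadpool.ThreadPool;
    import org.elasticsearch.transport.TransportChannel;
    import org.elasticsearch.transport.TransportRequest;
    import org.elasticsearch.transport.TransportRequestHandler;
    import org.elasticsearch.transport.TransportResponse;
    import org.elasticsearch.transport.TransportService;

    public class ExampleHandlerRegistration {

        // With the two messageReceived variants merged, TransportRequestHandler is a
        // functional interface, so a handler can be written as a lambda and the Task
        // argument is always supplied by the transport layer.
        static void register(TransportService transportService) {
            TransportRequestHandler<TransportRequest.Empty> handler =
                (request, channel, task) -> channel.sendResponse(TransportResponse.Empty.INSTANCE);
            transportService.registerRequestHandler("internal:example/ping",
                TransportRequest.Empty::new, ThreadPool.Names.GENERIC, handler);
        }
    }

Previously, a handler that required a task still had to override the channel-only variant, typically only to throw; that is the exception-throwing boilerplate the commit message says this change removes.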
--- .../netty4/Netty4ScheduledPingTests.java | 3 +- ...rossClusterSearchUnavailableClusterIT.java | 4 +- .../liveness/TransportLivenessAction.java | 3 +- .../cancel/TransportCancelTasksAction.java | 3 +- .../action/search/SearchTransportService.java | 169 +++++++----------- .../support/HandledTransportAction.java | 5 - .../broadcast/TransportBroadcastAction.java | 5 - .../node/TransportBroadcastByNodeAction.java | 2 +- .../support/nodes/TransportNodesAction.java | 6 - .../TransportReplicationAction.java | 16 -- ...ransportInstanceSingleOperationAction.java | 3 +- .../shard/TransportSingleShardAction.java | 5 +- .../support/tasks/TransportTasksAction.java | 2 +- .../index/NodeMappingRefreshAction.java | 3 +- .../action/shard/ShardStateAction.java | 5 +- .../discovery/zen/MasterFaultDetection.java | 3 +- .../discovery/zen/MembershipAction.java | 7 +- .../discovery/zen/NodesFaultDetection.java | 3 +- .../zen/PublishClusterStateAction.java | 5 +- .../discovery/zen/UnicastZenPing.java | 3 +- .../discovery/zen/ZenDiscovery.java | 3 +- .../gateway/LocalAllocateDangledIndices.java | 3 +- .../indices/flush/SyncedFlushService.java | 7 +- .../recovery/PeerRecoverySourceService.java | 3 +- .../recovery/PeerRecoveryTargetService.java | 22 ++- .../indices/store/IndicesStore.java | 3 +- .../VerifyNodeRepositoryAction.java | 3 +- .../transport/RequestHandlerRegistry.java | 2 +- .../TaskAwareTransportRequestHandler.java | 30 ---- .../transport/TransportActionProxy.java | 3 +- .../transport/TransportRequestHandler.java | 9 +- .../transport/TransportService.java | 2 +- .../action/IndicesRequestIT.java | 5 - .../TransportBroadcastByNodeActionTests.java | 2 +- .../TransportClientNodesServiceTests.java | 3 +- .../discovery/zen/ZenDiscoveryUnitTests.java | 4 +- .../RemoteClusterConnectionTests.java | 4 +- .../transport/TransportActionProxyTests.java | 14 +- .../AbstractSimpleTransportTestCase.java | 63 +++---- .../action/TransportRollupSearchAction.java | 5 - .../SecurityServerTransportInterceptor.java | 5 - 41 files changed, 177 insertions(+), 273 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java index b967a7ea41069..bd62ff0af0b5a 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; @@ -91,7 +92,7 @@ public void testScheduledPing() throws Exception { serviceA.registerRequestHandler("sayHello", TransportRequest.Empty::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(TransportRequest.Empty request, TransportChannel channel) { + public void messageReceived(TransportRequest.Empty request, TransportChannel channel, Task task) { try { channel.sendResponse(TransportResponse.Empty.INSTANCE, TransportResponseOptions.EMPTY); } 
catch (IOException e) { diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 73df782c92049..29aec900cefa9 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -103,12 +103,12 @@ private static MockTransportService startTransport( MockTransportService newService = MockTransportService.createNewService(s, version, threadPool, null); try { newService.registerRequestHandler(ClusterSearchShardsAction.NAME, ThreadPool.Names.SAME, ClusterSearchShardsRequest::new, - (request, channel) -> { + (request, channel, task) -> { channel.sendResponse(new ClusterSearchShardsResponse(new ClusterSearchShardsGroup[0], knownNodes.toArray(new DiscoveryNode[0]), Collections.emptyMap())); }); newService.registerRequestHandler(ClusterStateAction.NAME, ThreadPool.Names.SAME, ClusterStateRequest::new, - (request, channel) -> { + (request, channel, task) -> { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); for (DiscoveryNode node : knownNodes) { builder.add(node); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java index 09c608ac84280..ef8014cade4dc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; @@ -39,7 +40,7 @@ public TransportLivenessAction(ClusterService clusterService, TransportService t } @Override - public void messageReceived(LivenessRequest request, TransportChannel channel) throws Exception { + public void messageReceived(LivenessRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(new LivenessResponse(clusterService.getClusterName(), clusterService.localNode())); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index b99630dd4f960..918d56867627b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.threadpool.ThreadPool; @@ -285,7 +286,7 @@ public void writeTo(StreamOutput out) throws IOException { class BanParentRequestHandler implements 
TransportRequestHandler { @Override - public void messageReceived(final BanParentTaskRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final BanParentTaskRequest request, final TransportChannel channel, Task task) throws Exception { if (request.ban) { logger.debug("Received ban for the parent [{}] on the node [{}], reason: [{}]", request.parentTaskId, clusterService.localNode().getId(), request.reason); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 8a4c8b0882f08..dd43b82f8b862 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -45,13 +45,10 @@ import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.ScrollQuerySearchResult; -import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterService; -import org.elasticsearch.transport.TaskAwareTransportRequestHandler; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportActionProxy; -import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; @@ -314,150 +311,116 @@ public void writeTo(StreamOutput out) throws IOException { public static void registerRequestHandler(TransportService transportService, SearchService searchService) { transportService.registerRequestHandler(FREE_CONTEXT_SCROLL_ACTION_NAME, ThreadPool.Names.SAME, ScrollFreeContextRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ScrollFreeContextRequest request, TransportChannel channel, Task task) throws Exception { - boolean freed = searchService.freeContext(request.id()); - channel.sendResponse(new SearchFreeContextResponse(freed)); - } - }); + (request, channel, task) -> { + boolean freed = searchService.freeContext(request.id()); + channel.sendResponse(new SearchFreeContextResponse(freed)); + }); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, (Supplier) SearchFreeContextResponse::new); transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, ThreadPool.Names.SAME, SearchFreeContextRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(SearchFreeContextRequest request, TransportChannel channel, Task task) throws Exception { - boolean freed = searchService.freeContext(request.id()); - channel.sendResponse(new SearchFreeContextResponse(freed)); - } - }); + (request, channel, task) -> { + boolean freed = searchService.freeContext(request.id()); + channel.sendResponse(new SearchFreeContextResponse(freed)); + }); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_ACTION_NAME, (Supplier) SearchFreeContextResponse::new); transportService.registerRequestHandler(CLEAR_SCROLL_CONTEXTS_ACTION_NAME, () -> TransportRequest.Empty.INSTANCE, - ThreadPool.Names.SAME, new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(TransportRequest.Empty request, TransportChannel channel, Task task) throws Exception { - 
searchService.freeAllScrollContexts(); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } - }); + ThreadPool.Names.SAME, (request, channel, task) -> { + searchService.freeAllScrollContexts(); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + }); TransportActionProxy.registerProxyAction(transportService, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, () -> TransportResponse.Empty.INSTANCE); transportService.registerRequestHandler(DFS_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { - searchService.executeDfsPhase(request, (SearchTask) task, new ActionListener() { - @Override - public void onResponse(SearchPhaseResult searchPhaseResult) { - try { - channel.sendResponse(searchPhaseResult); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + (request, channel, task) -> { + searchService.executeDfsPhase(request, (SearchTask) task, new ActionListener() { + @Override + public void onResponse(SearchPhaseResult searchPhaseResult) { + try { + channel.sendResponse(searchPhaseResult); + } catch (IOException e) { + throw new UncheckedIOException(e); } - - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (IOException e1) { - throw new UncheckedIOException(e1); - } + } + + @Override + public void onFailure(Exception e) { + try { + channel.sendResponse(e); + } catch (IOException e1) { + throw new UncheckedIOException(e1); } - }); - - } + } + }); }); TransportActionProxy.registerProxyAction(transportService, DFS_ACTION_NAME, DfsSearchResult::new); transportService.registerRequestHandler(QUERY_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { - searchService.executeQueryPhase(request, (SearchTask) task, new ActionListener() { - @Override - public void onResponse(SearchPhaseResult searchPhaseResult) { - try { - channel.sendResponse(searchPhaseResult); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + (request, channel, task) -> { + searchService.executeQueryPhase(request, (SearchTask) task, new ActionListener() { + @Override + public void onResponse(SearchPhaseResult searchPhaseResult) { + try { + channel.sendResponse(searchPhaseResult); + } catch (IOException e) { + throw new UncheckedIOException(e); } - - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (IOException e1) { - throw new UncheckedIOException(e1); - } + } + + @Override + public void onFailure(Exception e) { + try { + channel.sendResponse(e); + } catch (IOException e1) { + throw new UncheckedIOException(e1); } - }); - } + } + }); }); TransportActionProxy.registerProxyAction(transportService, QUERY_ACTION_NAME, (request) -> ((ShardSearchRequest)request).numberOfShards() == 1 ? 
QueryFetchSearchResult::new : QuerySearchResult::new); transportService.registerRequestHandler(QUERY_ID_ACTION_NAME, ThreadPool.Names.SEARCH, QuerySearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(QuerySearchRequest request, TransportChannel channel, Task task) throws Exception { - QuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + QuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, QUERY_ID_ACTION_NAME, QuerySearchResult::new); transportService.registerRequestHandler(QUERY_SCROLL_ACTION_NAME, ThreadPool.Names.SEARCH, InternalScrollSearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel, Task task) throws Exception { - ScrollQuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + ScrollQuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, QUERY_SCROLL_ACTION_NAME, ScrollQuerySearchResult::new); transportService.registerRequestHandler(QUERY_FETCH_SCROLL_ACTION_NAME, ThreadPool.Names.SEARCH, InternalScrollSearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel, Task task) throws Exception { - ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, QUERY_FETCH_SCROLL_ACTION_NAME, ScrollQueryFetchSearchResult::new); transportService.registerRequestHandler(FETCH_ID_SCROLL_ACTION_NAME, ThreadPool.Names.SEARCH, ShardFetchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardFetchRequest request, TransportChannel channel, Task task) throws Exception { - FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_SCROLL_ACTION_NAME, FetchSearchResult::new); transportService.registerRequestHandler(FETCH_ID_ACTION_NAME, ThreadPool.Names.SEARCH, ShardFetchSearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardFetchSearchRequest request, TransportChannel channel, Task task) throws Exception { - FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_ACTION_NAME, FetchSearchResult::new); // this is cheap, 
it does not fetch during the rewrite phase, so we can let it quickly execute on a networking thread transportService.registerRequestHandler(QUERY_CAN_MATCH_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { - boolean canMatch = searchService.canMatch(request); - channel.sendResponse(new CanMatchResponse(canMatch)); - } + (request, channel, task) -> { + boolean canMatch = searchService.canMatch(request); + channel.sendResponse(new CanMatchResponse(canMatch)); }); TransportActionProxy.registerProxyAction(transportService, QUERY_CAN_MATCH_NAME, (Supplier) CanMatchResponse::new); diff --git a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index 7cdcd017b9946..c55e0cff6f250 100644 --- a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -64,11 +64,6 @@ protected HandledTransportAction(Settings settings, String actionName, boolean c class TransportHandler implements TransportRequestHandler { - @Override - public final void messageReceived(Request request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override public final void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { // We already got the task created on the network layer - no need to create it again on the transport layer diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index 8a28c2c9d891d..1bec46fd1213e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -284,10 +284,5 @@ class ShardTransportHandler implements TransportRequestHandler { public void messageReceived(ShardRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(shardOperation(request, task)); } - - @Override - public final void messageReceived(final ShardRequest request, final TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required"); - } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index dac1a55b6361f..348162b8c33bd 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -393,7 +393,7 @@ protected void onCompletion() { class BroadcastByNodeTransportRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final NodeRequest request, TransportChannel channel) throws Exception { + public void messageReceived(final NodeRequest request, TransportChannel channel, Task task) throws Exception { List shards = 
request.getShards(); final int totalShards = shards.size(); if (logger.isTraceEnabled()) { diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 7a074c91c7152..6a9ac53f7bebd 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -258,12 +258,6 @@ class NodeTransportHandler implements TransportRequestHandler { public void messageReceived(NodeRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(nodeOperation(request, task)); } - - @Override - public void messageReceived(NodeRequest request, TransportChannel channel) throws Exception { - channel.sendResponse(nodeOperation(request)); - } - } } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index d7c908bf9fa5b..c31ee81a802a5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -273,11 +273,6 @@ public void onFailure(Exception e) { } }); } - - @Override - public void messageReceived(Request request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } } protected class PrimaryOperationTransportHandler implements TransportRequestHandler> { @@ -286,11 +281,6 @@ public PrimaryOperationTransportHandler() { } - @Override - public void messageReceived(final ConcreteShardRequest request, final TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override public void messageReceived(ConcreteShardRequest request, TransportChannel channel, Task task) { new AsyncPrimaryAction(request.request, request.targetAllocationID, request.primaryTerm, channel, (ReplicationTask) task).run(); @@ -493,12 +483,6 @@ public void respond(ActionListener listener) { public class ReplicaOperationTransportHandler implements TransportRequestHandler> { - @Override - public void messageReceived( - final ConcreteReplicaRequest replicaRequest, final TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override public void messageReceived( final ConcreteReplicaRequest replicaRequest, diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index 280a35207a9db..2d8ccb6e524f4 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportChannel; @@ -243,7 +244,7 @@ public void onTimeout(TimeValue timeout) { private class ShardTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(final Request request, final TransportChannel channel) throws Exception { + public void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { shardOperation(request, new ActionListener() { @Override public void onResponse(Response response) { diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index d7e5633559d8a..7116061640f3e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -40,6 +40,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -271,7 +272,7 @@ public void handleException(TransportException exp) { private class TransportHandler implements TransportRequestHandler { @Override - public void messageReceived(Request request, final TransportChannel channel) throws Exception { + public void messageReceived(Request request, final TransportChannel channel, Task task) throws Exception { // if we have a local operation, execute it on a thread since we don't spawn execute(request, new ActionListener() { @Override @@ -298,7 +299,7 @@ public void onFailure(Exception e) { private class ShardTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(final Request request, final TransportChannel channel) throws Exception { + public void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { if (logger.isTraceEnabled()) { logger.trace("executing [{}] on shard [{}]", request, request.internalShardId); } diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index 5599dd5f98b06..ee116d9f957c6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -338,7 +338,7 @@ private void finishHim() { class NodeTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(final NodeTaskRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final NodeTaskRequest request, final TransportChannel channel, Task task) throws Exception { nodeOperation(request, new ActionListener() { @Override public void onResponse( diff --git a/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java b/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java index fc7a4206486a3..2559c14848d76 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java 
+++ b/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -65,7 +66,7 @@ public void nodeMappingRefresh(final DiscoveryNode masterNode, final NodeMapping private class NodeMappingRefreshTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(NodeMappingRefreshRequest request, TransportChannel channel) throws Exception { + public void messageReceived(NodeMappingRefreshRequest request, TransportChannel channel, Task task) throws Exception { metaDataMappingService.refreshMapping(request.index(), request.indexUUID()); channel.sendResponse(TransportResponse.Empty.INSTANCE); } diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index f690efa4c9a0c..0949e47cd0527 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -52,6 +52,7 @@ import org.elasticsearch.discovery.Discovery; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.EmptyTransportResponseHandler; @@ -237,7 +238,7 @@ private static class ShardFailedTransportHandler implements TransportRequestHand } @Override - public void messageReceived(FailedShardEntry request, TransportChannel channel) throws Exception { + public void messageReceived(FailedShardEntry request, TransportChannel channel, Task task) throws Exception { logger.debug(() -> new ParameterizedMessage("{} received shard failed for {}", request.shardId, request), request.failure); clusterService.submitStateUpdateTask( "shard-failed", @@ -487,7 +488,7 @@ private static class ShardStartedTransportHandler implements TransportRequestHan } @Override - public void messageReceived(StartedShardEntry request, TransportChannel channel) throws Exception { + public void messageReceived(StartedShardEntry request, TransportChannel channel, Task task) throws Exception { logger.debug("{} received shard started for [{}]", request.shardId, request); clusterService.submitStateUpdateTask( "shard-started " + request, diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java b/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java index c38cfe88619ee..5acf2effad390 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import 
org.elasticsearch.transport.TransportChannel; @@ -321,7 +322,7 @@ public Throwable fillInStackTrace() { private class MasterPingRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final MasterPingRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final MasterPingRequest request, final TransportChannel channel, Task task) throws Exception { final DiscoveryNodes nodes = clusterStateSupplier.get().nodes(); // check if we are really the same master as the one we seemed to be think we are // this can happen if the master got "kill -9" and then another node started using the same port diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java b/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java index fdfcd8ac29079..e8bafea66d3a4 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -133,7 +134,7 @@ public void writeTo(StreamOutput out) throws IOException { private class JoinRequestRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final JoinRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final JoinRequest request, final TransportChannel channel, Task task) throws Exception { listener.onJoin(request.node, new JoinCallback() { @Override public void onSuccess() { @@ -190,7 +191,7 @@ static class ValidateJoinRequestRequestHandler implements TransportRequestHandle } @Override - public void messageReceived(ValidateJoinRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ValidateJoinRequest request, TransportChannel channel, Task task) throws Exception { DiscoveryNode node = localNodeSupplier.get(); assert node != null : "local node is null"; joinValidators.stream().forEach(action -> action.accept(node, request.state)); @@ -281,7 +282,7 @@ public void writeTo(StreamOutput out) throws IOException { private class LeaveRequestRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(LeaveRequest request, TransportChannel channel) throws Exception { + public void messageReceived(LeaveRequest request, TransportChannel channel, Task task) throws Exception { listener.onLeave(request.node); channel.sendResponse(TransportResponse.Empty.INSTANCE); } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java b/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java index d19cc98441b79..57e5cab020be1 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportChannel; @@ -276,7 +277,7 @@ public String executor() { class PingRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(PingRequest request, TransportChannel channel) throws Exception { + public void messageReceived(PingRequest request, TransportChannel channel, Task task) throws Exception { // if we are not the node we are supposed to be pinged, send an exception // this can happen when a kill -9 is sent, and another node is started using the same port if (!localNode.equals(request.targetNode())) { diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java b/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java index 5398b2a057ae4..5e9f960e893cf 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java @@ -45,6 +45,7 @@ import org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandler; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.EmptyTransportResponseHandler; @@ -447,14 +448,14 @@ public void onFailure(Exception e) { private class SendClusterStateRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(BytesTransportRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(BytesTransportRequest request, final TransportChannel channel, Task task) throws Exception { handleIncomingClusterStateRequest(request, channel); } } private class CommitClusterStateRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(CommitClusterStateRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(CommitClusterStateRequest request, final TransportChannel channel, Task task) throws Exception { handleCommitRequest(request, channel); } } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index 9c86fa17e9b06..74414dc446e6d 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -45,6 +45,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.KeyedLock; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; @@ -563,7 +564,7 @@ private UnicastPingResponse handlePingRequest(final UnicastPingRequest request) class UnicastPingRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(UnicastPingRequest request, TransportChannel channel) throws Exception { + public void messageReceived(UnicastPingRequest request, TransportChannel channel, Task task) throws Exception { if (closed) { throw new AlreadyClosedException("node is shutting down"); } diff --git 
a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 55ecf7ca25fa6..eb9a9f8d4885d 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -56,6 +56,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.DiscoveryStats; import org.elasticsearch.discovery.zen.PublishClusterStateAction.IncomingClusterStateListener; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -1187,7 +1188,7 @@ public void writeTo(StreamOutput out) throws IOException { class RejoinClusterRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel, Task task) throws Exception { try { channel.sendResponse(TransportResponse.Empty.INSTANCE); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java b/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java index c8986b0493459..7bc2e38dde024 100644 --- a/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java +++ b/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.MasterNotDiscoveredException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -112,7 +113,7 @@ public interface Listener { class AllocateDangledRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel, Task task) throws Exception { String[] indexNames = new String[request.indices.length]; for (int i = 0; i < request.indices.length; i++) { indexNames[i] = request.indices[i].getIndex().getName(); diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index 6ef6c1546d152..f01b4bb312174 100644 --- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -54,6 +54,7 @@ import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -778,7 +779,7 @@ public String toString() { private final class PreSyncedFlushTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(PreShardSyncedFlushRequest request, 
TransportChannel channel) throws Exception { + public void messageReceived(PreShardSyncedFlushRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(performPreSyncedFlush(request)); } } @@ -786,7 +787,7 @@ public void messageReceived(PreShardSyncedFlushRequest request, TransportChannel private final class SyncedFlushTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(ShardSyncedFlushRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ShardSyncedFlushRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(performSyncedFlush(request)); } } @@ -794,7 +795,7 @@ public void messageReceived(ShardSyncedFlushRequest request, TransportChannel ch private final class InFlightOpCountTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(InFlightOpsRequest request, TransportChannel channel) throws Exception { + public void messageReceived(InFlightOpsRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(performInFlightOps(request)); } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java index 51eabdd4e8c73..06e8a5734f69b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; @@ -103,7 +104,7 @@ private RecoveryResponse recover(final StartRecoveryRequest request) throws IOEx class StartRecoveryTransportRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final StartRecoveryRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final StartRecoveryRequest request, final TransportChannel channel, Task task) throws Exception { RecoveryResponse response = recover(request); channel.sendResponse(response); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index cb49eed25f8fe..aaa4697e5cbb5 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -55,6 +55,7 @@ import org.elasticsearch.index.translog.TranslogCorruptedException; import org.elasticsearch.indices.recovery.RecoveriesCollection.RecoveryRef; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.FutureTransportResponseHandler; @@ -397,7 +398,8 @@ public interface RecoveryListener { class PrepareForTranslogOperationsRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, 
TransportChannel channel) throws Exception { + public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, TransportChannel channel, + Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().prepareForTranslogOperations(request.isFileBasedRecovery(), request.totalTranslogOps()); @@ -409,7 +411,7 @@ public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, class FinalizeRecoveryRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId())) { recoveryRef.target().finalizeRecovery(request.globalCheckpoint()); @@ -421,7 +423,7 @@ public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportCh class WaitForClusterStateRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryWaitForClusterStateRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryWaitForClusterStateRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().ensureClusterStateVersion(request.clusterStateVersion()); @@ -433,7 +435,8 @@ public void messageReceived(RecoveryWaitForClusterStateRequest request, Transpor class HandoffPrimaryContextRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final RecoveryHandoffPrimaryContextRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final RecoveryHandoffPrimaryContextRequest request, final TransportChannel channel, + Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId())) { recoveryRef.target().handoffPrimaryContext(request.primaryContext()); } @@ -445,7 +448,8 @@ public void messageReceived(final RecoveryHandoffPrimaryContextRequest request, class TranslogOperationsRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final RecoveryTranslogOperationsRequest request, final TransportChannel channel) throws IOException { + public void messageReceived(final RecoveryTranslogOperationsRequest request, final TransportChannel channel, + Task task) throws IOException { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId())) { final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext()); @@ -463,7 +467,7 @@ public void messageReceived(final RecoveryTranslogOperationsRequest request, fin @Override public void onNewClusterState(ClusterState state) { try { - messageReceived(request, channel); + messageReceived(request, channel, task); } catch (Exception e) { onFailure(e); } @@ -537,7 +541,7 @@ public void onTimeout(TimeValue timeout) { class FilesInfoRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel channel) throws Exception { + public void 
messageReceived(RecoveryFilesInfoRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().receiveFileInfo(request.phase1FileNames, request.phase1FileSizes, request.phase1ExistingFileNames, @@ -550,7 +554,7 @@ public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel c class CleanFilesRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryCleanFilesRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryCleanFilesRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().cleanFiles(request.totalTranslogOps(), request.sourceMetaSnapshot()); @@ -565,7 +569,7 @@ class FileChunkTransportRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final ShardActiveRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final ShardActiveRequest request, final TransportChannel channel, Task task) throws Exception { IndexShard indexShard = getShard(request); // make sure shard is really there before register cluster state observer diff --git a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java index ba3f9c048d08a..380ae97408016 100644 --- a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoriesService.VerifyResponse; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -146,7 +147,7 @@ public void writeTo(StreamOutput out) throws IOException { class VerifyNodeRepositoryRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(VerifyNodeRepositoryRequest request, TransportChannel channel) throws Exception { + public void messageReceived(VerifyNodeRepositoryRequest request, TransportChannel channel, Task task) throws Exception { DiscoveryNode localNode = clusterService.state().nodes().getLocalNode(); try { doVerify(request.repository, request.verificationToken, localNode); diff --git a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java index 91b54ab8f2097..4e09daf9ccf0a 100644 --- a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java +++ b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java @@ -59,7 +59,7 @@ public Request newRequest(StreamInput in) throws IOException { public void processMessageReceived(Request request, TransportChannel channel) throws Exception { final Task task = taskManager.register(channel.getChannelType(), action, request); if (task == null) { - handler.messageReceived(request, channel); + handler.messageReceived(request, channel, null); } else { boolean success = 
false; try { diff --git a/server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java b/server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java deleted file mode 100644 index 12899d86d430d..0000000000000 --- a/server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.transport; - -/** - * Transport request handlers that is using task context - */ -public abstract class TaskAwareTransportRequestHandler implements TransportRequestHandler { - @Override - public final void messageReceived(T request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required"); - } -} diff --git a/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java b/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java index 8c48f08874350..a17509e826003 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -52,7 +53,7 @@ private static class ProxyRequestHandler implements Tran } @Override - public void messageReceived(T request, TransportChannel channel) throws Exception { + public void messageReceived(T request, TransportChannel channel, Task task) throws Exception { DiscoveryNode targetNode = request.targetNode; TransportRequest wrappedRequest = request.wrapped; service.sendRequest(targetNode, action, wrappedRequest, diff --git a/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java b/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java index 8c90b82fe7c45..be95798806847 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java @@ -23,12 +23,5 @@ public interface TransportRequestHandler { - /** - * Override this method if access to the Task parameter is needed - */ - default void messageReceived(final T request, final TransportChannel channel, Task task) throws Exception { - messageReceived(request, channel); - } - - void messageReceived(T request, TransportChannel channel) throws Exception; + void messageReceived(T request, TransportChannel channel, Task task) throws Exception; } diff --git 
a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 656d8c3841769..8d3929cd6615a 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -231,7 +231,7 @@ protected void doStart() { () -> HandshakeRequest.INSTANCE, ThreadPool.Names.SAME, false, false, - (request, channel) -> channel.sendResponse( + (request, channel, task) -> channel.sendResponse( new HandshakeResponse(localNode, clusterName, localNode.getVersion()))); if (connectToRemoteCluster) { // here we start to connect to the remote clusters diff --git a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java index 8fac0b91cd6d6..40795bff730e0 100644 --- a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java @@ -779,11 +779,6 @@ public void messageReceived(T request, TransportChannel channel, Task task) thro } requestHandler.messageReceived(request, channel, task); } - - @Override - public void messageReceived(T request, TransportChannel channel) throws Exception { - messageReceived(request, channel, null); - } } } } diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 61beb59bc0c24..fdc3d890363ad 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -364,7 +364,7 @@ public void testOperationExecution() throws Exception { TestTransportChannel channel = new TestTransportChannel(); - handler.messageReceived(action.new NodeRequest(nodeId, new Request(), new ArrayList<>(shards)), channel); + handler.messageReceived(action.new NodeRequest(nodeId, new Request(), new ArrayList<>(shards)), channel, null); // check the operation was executed only on the expected shards assertEquals(shards, action.getResults().keySet()); diff --git a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index c8030e1cf4aee..2beaed1e106e4 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.node.Node; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; @@ -469,7 +470,7 @@ class MockHandler implements TransportRequestHandler { } @Override - public void messageReceived(ClusterStateRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ClusterStateRequest request, TransportChannel channel, Task task) throws Exception { if (block.get()) { release.await(); return; diff --git 
a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index a60a23bcd6d5c..6dbf80d9be675 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -368,7 +368,7 @@ public void testValidateOnUnsupportedIndexVersionCreated() throws Exception { .routingTable(RoutingTable.builder().add(indexRoutingTable).build()); if (incompatible) { IllegalStateException ex = expectThrows(IllegalStateException.class, () -> - request.messageReceived(new MembershipAction.ValidateJoinRequest(stateBuilder.build()), null)); + request.messageReceived(new MembershipAction.ValidateJoinRequest(stateBuilder.build()), null, null)); assertEquals("index [test] version not supported: " + VersionUtils.getPreviousVersion(Version.CURRENT.minimumIndexCompatibilityVersion()) + " minimum compatible index version is: " + Version.CURRENT.minimumIndexCompatibilityVersion(), ex.getMessage()); @@ -400,7 +400,7 @@ public void sendResponse(TransportResponse response, TransportResponseOptions op public void sendResponse(Exception exception) throws IOException { } - }); + }, null); assertTrue(sendResponse.get()); } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 637b8fb26a880..0369eda2a8899 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -114,7 +114,7 @@ public static MockTransportService startTransport( MockTransportService newService = MockTransportService.createNewService(s, version, threadPool, null); try { newService.registerRequestHandler(ClusterSearchShardsAction.NAME,ThreadPool.Names.SAME, ClusterSearchShardsRequest::new, - (request, channel) -> { + (request, channel, task) -> { if ("index_not_found".equals(request.preference())) { channel.sendResponse(new IndexNotFoundException("index")); } else { @@ -123,7 +123,7 @@ public static MockTransportService startTransport( } }); newService.registerRequestHandler(ClusterStateAction.NAME, ThreadPool.Names.SAME, ClusterStateRequest::new, - (request, channel) -> { + (request, channel, task) -> { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); for (DiscoveryNode node : knownNodes) { builder.add(node); diff --git a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java index 3f4ae7bdd2d76..491ba123a451d 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.transport; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; @@ -26,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import 
org.elasticsearch.threadpool.TestThreadPool; @@ -88,7 +88,7 @@ private MockTransportService buildService(final Version version) { public void testSendMessage() throws InterruptedException { serviceA.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_A"; @@ -98,7 +98,7 @@ public void testSendMessage() throws InterruptedException { serviceA.connectToNode(nodeB); serviceB.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_B"; @@ -107,7 +107,7 @@ public void testSendMessage() throws InterruptedException { TransportActionProxy.registerProxyAction(serviceB, "/test", SimpleTestResponse::new); serviceB.connectToNode(nodeC); serviceC.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_C"; @@ -151,7 +151,7 @@ public String executor() { public void testException() throws InterruptedException { serviceA.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_A"; @@ -161,7 +161,7 @@ public void testException() throws InterruptedException { serviceA.connectToNode(nodeB); serviceB.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_B"; @@ -170,7 +170,7 @@ public void testException() throws InterruptedException { TransportActionProxy.registerProxyAction(serviceB, "/test", SimpleTestResponse::new); serviceB.connectToNode(nodeC); serviceC.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { throw new ElasticsearchException("greetings from TS_C"); }); TransportActionProxy.registerProxyAction(serviceC, "/test", SimpleTestResponse::new); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 587c192beb2d6..0b676e1403481 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -47,6 +47,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.mocksocket.MockServerSocket; import org.elasticsearch.node.Node; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.transport.MockTransportService; @@ -205,7 +206,7 @@ public void assertNoPendingHandshakes(Transport transport) { public void testHelloWorld() { serviceA.registerRequestHandler("sayHello", 
StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { assertThat("moshe", equalTo(request.message)); try { channel.sendResponse(new StringMessageResponse("hello " + request.message)); @@ -280,7 +281,7 @@ public void handleException(TransportException exp) { public void testThreadContext() throws ExecutionException, InterruptedException { - serviceA.registerRequestHandler("ping_pong", StringMessageRequest::new, ThreadPool.Names.GENERIC, (request, channel) -> { + serviceA.registerRequestHandler("ping_pong", StringMessageRequest::new, ThreadPool.Names.GENERIC, (request, channel, task) -> { assertEquals("ping_user", threadPool.getThreadContext().getHeader("test.ping.user")); assertNull(threadPool.getThreadContext().getTransient("my_private_context")); try { @@ -339,7 +340,7 @@ public void testLocalNodeConnection() throws InterruptedException { serviceA.disconnectFromNode(nodeA); final AtomicReference exception = new AtomicReference<>(); serviceA.registerRequestHandler("localNode", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { try { channel.sendResponse(new StringMessageResponse(request.message)); } catch (IOException e) { @@ -377,7 +378,7 @@ public String executor() { } public void testAdapterSendReceiveCallbacks() throws Exception { - final TransportRequestHandler requestHandler = (request, channel) -> { + final TransportRequestHandler requestHandler = (request, channel, task) -> { try { if (randomBoolean()) { channel.sendResponse(TransportResponse.Empty.INSTANCE); @@ -485,7 +486,7 @@ public void requestSent(DiscoveryNode node, long requestId, String action, Trans public void testVoidMessageCompressed() { serviceA.registerRequestHandler("sayHello", TransportRequest.Empty::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { try { TransportResponseOptions responseOptions = TransportResponseOptions.builder().withCompress(true).build(); channel.sendResponse(TransportResponse.Empty.INSTANCE, responseOptions); @@ -531,7 +532,7 @@ public void testHelloWorldCompressed() { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) { assertThat("moshe", equalTo(request.message)); try { TransportResponseOptions responseOptions = TransportResponseOptions.builder().withCompress(true).build(); @@ -580,7 +581,7 @@ public void testErrorMessage() { serviceA.registerRequestHandler("sayHelloException", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) throws Exception { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws Exception { assertThat("moshe", equalTo(request.message)); throw new RuntimeException("bad message !!!"); } @@ -639,7 +640,7 @@ public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierExcepti Set sendingErrors = ConcurrentCollections.newConcurrentSet(); Set responseErrors = ConcurrentCollections.newConcurrentSet(); serviceA.registerRequestHandler("test", TestRequest::new, - randomBoolean() ? 
ThreadPool.Names.SAME : ThreadPool.Names.GENERIC, (request, channel) -> { + randomBoolean() ? ThreadPool.Names.SAME : ThreadPool.Names.GENERIC, (request, channel, task) -> { try { channel.sendResponse(new TestResponse()); } catch (Exception e) { @@ -647,7 +648,7 @@ public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierExcepti responseErrors.add(e); } }); - final TransportRequestHandler ignoringRequestHandler = (request, channel) -> { + final TransportRequestHandler ignoringRequestHandler = (request, channel, task) -> { try { channel.sendResponse(new TestResponse()); } catch (Exception e) { @@ -763,7 +764,7 @@ public void testNotifyOnShutdown() throws Exception { final CountDownLatch latch2 = new CountDownLatch(1); try { serviceA.registerRequestHandler("foobar", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { try { latch2.await(); logger.info("Stop ServiceB now"); @@ -791,7 +792,7 @@ public void testTimeoutSendExceptionWithNeverSendingBackResponse() throws Except serviceA.registerRequestHandler("sayHelloTimeoutNoResponse", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) { assertThat("moshe", equalTo(request.message)); // don't send back a response } @@ -836,7 +837,7 @@ public void testTimeoutSendExceptionWithDelayedResponse() throws Exception { serviceA.registerRequestHandler("sayHelloTimeoutDelayedResponse", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) throws InterruptedException { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws InterruptedException { String message = request.message; inFlight.acquireUninterruptibly(); try { @@ -938,10 +939,10 @@ public void handleException(TransportException exp) { } public void testTracerLog() throws InterruptedException { - TransportRequestHandler handler = (request, channel) -> channel.sendResponse(new StringMessageResponse("")); + TransportRequestHandler handler = (request, channel, task) -> channel.sendResponse(new StringMessageResponse("")); TransportRequestHandler handlerWithError = new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) throws Exception { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws Exception { if (request.timeout() > 0) { Thread.sleep(request.timeout); } @@ -1257,7 +1258,7 @@ public void testVersionFrom0to1() throws Exception { serviceB.registerRequestHandler("/version", Version1Request::new, ThreadPool.Names.SAME, new TransportRequestHandler() { @Override - public void messageReceived(Version1Request request, TransportChannel channel) throws Exception { + public void messageReceived(Version1Request request, TransportChannel channel, Task task) throws Exception { assertThat(request.value1, equalTo(1)); assertThat(request.value2, equalTo(0)); // not set, coming from service A Version1Response response = new Version1Response(); @@ -1301,7 +1302,7 @@ public void testVersionFrom1to0() throws Exception { serviceA.registerRequestHandler("/version", Version0Request::new, ThreadPool.Names.SAME, new 
TransportRequestHandler() { @Override - public void messageReceived(Version0Request request, TransportChannel channel) throws Exception { + public void messageReceived(Version0Request request, TransportChannel channel, Task task) throws Exception { assertThat(request.value1, equalTo(1)); Version0Response response = new Version0Response(); response.value1 = 1; @@ -1344,7 +1345,7 @@ public String executor() { public void testVersionFrom1to1() throws Exception { serviceB.registerRequestHandler("/version", Version1Request::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertThat(request.value1, equalTo(1)); assertThat(request.value2, equalTo(2)); Version1Response response = new Version1Response(); @@ -1388,7 +1389,7 @@ public String executor() { public void testVersionFrom0to0() throws Exception { serviceA.registerRequestHandler("/version", Version0Request::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertThat(request.value1, equalTo(1)); Version0Response response = new Version0Response(); response.value1 = 1; @@ -1427,7 +1428,7 @@ public String executor() { public void testMockFailToSendNoConnectRule() throws Exception { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { assertThat("moshe", equalTo(request.message)); throw new RuntimeException("bad message !!!"); }); @@ -1484,7 +1485,7 @@ public void handleException(TransportException exp) { public void testMockUnresponsiveRule() throws IOException { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { assertThat("moshe", equalTo(request.message)); throw new RuntimeException("bad message !!!"); }); @@ -1540,7 +1541,7 @@ public void testHostOnMessages() throws InterruptedException { final AtomicReference addressB = new AtomicReference<>(); serviceB.registerRequestHandler("action1", TestRequest::new, ThreadPool.Names.SAME, new TransportRequestHandler() { @Override - public void messageReceived(TestRequest request, TransportChannel channel) throws Exception { + public void messageReceived(TestRequest request, TransportChannel channel, Task task) throws Exception { addressA.set(request.remoteAddress()); channel.sendResponse(new TestResponse()); latch.countDown(); @@ -1582,7 +1583,7 @@ public void testBlockingIncomingRequests() throws Exception { Settings.EMPTY, false, false)) { AtomicBoolean requestProcessed = new AtomicBoolean(false); service.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { requestProcessed.set(true); channel.sendResponse(TransportResponse.Empty.INSTANCE); }); @@ -1744,7 +1745,7 @@ class TestRequestHandler implements TransportRequestHandler { } @Override - public void messageReceived(TestRequest request, TransportChannel channel) throws Exception { + public void messageReceived(TestRequest request, TransportChannel channel, Task task) throws Exception { if (randomBoolean()) { Thread.sleep(randomIntBetween(10, 50)); } @@ -1868,18 +1869,18 @@ public String executor() { public void testRegisterHandlerTwice() { serviceB.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), - (request, message) -> { + (request, message, task) -> { throw new AssertionError("boom"); }); 
expectThrows(IllegalArgumentException.class, () -> serviceB.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), - (request, message) -> { + (request, message, task) -> { throw new AssertionError("boom"); }) ); serviceA.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), - (request, message) -> { + (request, message, task) -> { throw new AssertionError("boom"); }); } @@ -2066,7 +2067,7 @@ public void testResponseHeadersArePreserved() throws InterruptedException { List executors = new ArrayList<>(ThreadPool.THREAD_POOL_TYPES.keySet()); CollectionUtil.timSort(executors); // makes sure it's reproducible serviceA.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { threadPool.getThreadContext().putTransient("boom", new Object()); threadPool.getThreadContext().addResponseHeader("foo.bar", "baz"); @@ -2127,7 +2128,7 @@ public void testHandlerIsInvokedOnConnectionClose() throws IOException, Interrup CollectionUtil.timSort(executors); // makes sure it's reproducible TransportService serviceC = build(Settings.builder().put("name", "TS_TEST").build(), version0, null, true); serviceC.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // do nothing }); serviceC.start(); @@ -2187,7 +2188,7 @@ public void testConcurrentDisconnectOnNonPublishedConnection() throws IOExceptio CountDownLatch receivedLatch = new CountDownLatch(1); CountDownLatch sendResponseLatch = new CountDownLatch(1); serviceC.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // don't block on a network thread here threadPool.generic().execute(new AbstractRunnable() { @Override @@ -2255,7 +2256,7 @@ public void testTransportStats() throws Exception { CountDownLatch receivedLatch = new CountDownLatch(1); CountDownLatch sendResponseLatch = new CountDownLatch(1); serviceB.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // don't block on a network thread here threadPool.generic().execute(new AbstractRunnable() { @Override @@ -2368,7 +2369,7 @@ public void testTransportStatsWithException() throws Exception { Exception ex = new RuntimeException("boom"); ex.setStackTrace(new StackTraceElement[0]); serviceB.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // don't block on a network thread here threadPool.generic().execute(new AbstractRunnable() { @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index a9f3dc5a1b786..239be32033f13 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -399,11 +399,6 @@ static RollupSearchContext separateIndices(String[] indices, ImmutableOpenMap { - @Override - public final void messageReceived(SearchRequest request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required 
for this operation"); - } - @Override public final void messageReceived(final SearchRequest request, final TransportChannel channel, Task task) throws Exception { // We already got the task created on the network layer - no need to create it again on the transport layer diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java index 7de3e5d0980d6..55287d5d50387 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java @@ -318,10 +318,5 @@ public void messageReceived(T request, TransportChannel channel, Task task) thro } } } - - @Override - public void messageReceived(T request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("task parameter is required for this operation"); - } } } From 16e4e7a7cfb5196e02e6fc988f34553dc9d34acc Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 22 Jun 2018 17:15:29 +0200 Subject: [PATCH 77/92] Node selector per client rather than per request (#31471) We had made node selectors configurable per request, but none of the other language clients allow that. A good reason not to do so is that using a different node selector per request breaks round-robin. This commit makes NodeSelector configurable only at client initialization. It also improves the docs on this matter, which is important given that even a single node selector can still affect round-robin. --- .../elasticsearch/client/NodeSelector.java | 6 +-- .../elasticsearch/client/RequestOptions.java | 35 ++----------- .../org/elasticsearch/client/RestClient.java | 16 +++--- .../client/RestClientBuilder.java | 14 ++++- .../client/NodeSelectorTests.java | 2 +- .../client/RequestOptionsTests.java | 9 +--- .../RestClientMultipleHostsIntegTests.java | 46 ++++++++--------- .../client/RestClientMultipleHostsTests.java | 33 +++++------- .../client/RestClientSingleHostTests.java | 2 +- .../elasticsearch/client/RestClientTests.java | 4 +- .../RestClientDocumentation.java | 51 +++++++++++++++---- .../low-level/configuration.asciidoc | 27 ++++++++++ docs/java-rest/low-level/usage.asciidoc | 22 ++++---- .../smoketest/DocsClientYamlTestSuiteIT.java | 5 +- .../test/rest/ESRestTestCase.java | 12 +++-- .../rest/yaml/ClientYamlDocsTestClient.java | 11 ++-- .../test/rest/yaml/ClientYamlTestClient.java | 38 ++++++++++---- .../rest/yaml/ESClientYamlSuiteTestCase.java | 13 ++--- .../section/ClientYamlTestSectionTests.java | 4 +- .../smoketest/XDocsClientYamlTestSuiteIT.java | 6 +-- 20 files changed, 208 insertions(+), 148 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java b/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java index 5f5296fe16b13..b3efa08befaf8 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java +++ b/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java @@ -24,7 +24,7 @@ /** * Selects nodes that can receive requests. Used to keep requests away * from master nodes or to send them to nodes with a particular attribute. - * Use with {@link RequestOptions.Builder#setNodeSelector(NodeSelector)}. + * Use with {@link RestClientBuilder#setNodeSelector(NodeSelector)}.
*/ public interface NodeSelector { /** @@ -68,7 +68,7 @@ public String toString() { * have the {@code master} role OR it has the data {@code data} * role. */ - NodeSelector NOT_MASTER_ONLY = new NodeSelector() { + NodeSelector SKIP_DEDICATED_MASTERS = new NodeSelector() { @Override public void select(Iterable nodes) { for (Iterator itr = nodes.iterator(); itr.hasNext();) { @@ -84,7 +84,7 @@ public void select(Iterable nodes) { @Override public String toString() { - return "NOT_MASTER_ONLY"; + return "SKIP_DEDICATED_MASTERS"; } }; } diff --git a/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java b/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java index 97d150da3d3ff..cf6bd3d49f59e 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java @@ -37,22 +37,18 @@ */ public final class RequestOptions { public static final RequestOptions DEFAULT = new Builder( - Collections.
    emptyList(), NodeSelector.ANY, - HeapBufferedResponseConsumerFactory.DEFAULT).build(); + Collections.
    emptyList(), HeapBufferedResponseConsumerFactory.DEFAULT).build(); private final List
    headers; - private final NodeSelector nodeSelector; private final HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory; private RequestOptions(Builder builder) { this.headers = Collections.unmodifiableList(new ArrayList<>(builder.headers)); - this.nodeSelector = builder.nodeSelector; this.httpAsyncResponseConsumerFactory = builder.httpAsyncResponseConsumerFactory; } public Builder toBuilder() { - Builder builder = new Builder(headers, nodeSelector, httpAsyncResponseConsumerFactory); - return builder; + return new Builder(headers, httpAsyncResponseConsumerFactory); } /** @@ -62,14 +58,6 @@ public List
    getHeaders() { return headers; } - /** - * The selector that chooses which nodes are valid destinations for - * {@link Request}s with these options. - */ - public NodeSelector getNodeSelector() { - return nodeSelector; - } - /** * The {@link HttpAsyncResponseConsumerFactory} used to create one * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the @@ -93,9 +81,6 @@ public String toString() { b.append(headers.get(h).toString()); } } - if (nodeSelector != NodeSelector.ANY) { - b.append(", nodeSelector=").append(nodeSelector); - } if (httpAsyncResponseConsumerFactory != HttpAsyncResponseConsumerFactory.DEFAULT) { b.append(", consumerFactory=").append(httpAsyncResponseConsumerFactory); } @@ -113,24 +98,20 @@ public boolean equals(Object obj) { RequestOptions other = (RequestOptions) obj; return headers.equals(other.headers) - && nodeSelector.equals(other.nodeSelector) && httpAsyncResponseConsumerFactory.equals(other.httpAsyncResponseConsumerFactory); } @Override public int hashCode() { - return Objects.hash(headers, nodeSelector, httpAsyncResponseConsumerFactory); + return Objects.hash(headers, httpAsyncResponseConsumerFactory); } public static class Builder { private final List
    headers; - private NodeSelector nodeSelector; private HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory; - private Builder(List
    headers, NodeSelector nodeSelector, - HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory) { + private Builder(List
    headers, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory) { this.headers = new ArrayList<>(headers); - this.nodeSelector = nodeSelector; this.httpAsyncResponseConsumerFactory = httpAsyncResponseConsumerFactory; } @@ -150,14 +131,6 @@ public void addHeader(String name, String value) { this.headers.add(new ReqHeader(name, value)); } - /** - * Configure the selector that chooses which nodes are valid - * destinations for {@link Request}s with these options - */ - public void setNodeSelector(NodeSelector nodeSelector) { - this.nodeSelector = Objects.requireNonNull(nodeSelector, "nodeSelector cannot be null"); - } - /** * Set the {@link HttpAsyncResponseConsumerFactory} used to create one * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 82039cab5d04c..77c11db455e47 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -48,6 +48,7 @@ import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; import org.elasticsearch.client.DeadHostState.TimeSupplier; +import javax.net.ssl.SSLHandshakeException; import java.io.Closeable; import java.io.IOException; import java.net.ConnectException; @@ -74,7 +75,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import javax.net.ssl.SSLHandshakeException; import static java.util.Collections.singletonList; @@ -108,15 +108,17 @@ public class RestClient implements Closeable { private final AtomicInteger lastNodeIndex = new AtomicInteger(0); private final ConcurrentMap blacklist = new ConcurrentHashMap<>(); private final FailureListener failureListener; + private final NodeSelector nodeSelector; private volatile NodeTuple> nodeTuple; RestClient(CloseableHttpAsyncClient client, long maxRetryTimeoutMillis, Header[] defaultHeaders, - List nodes, String pathPrefix, FailureListener failureListener) { + List nodes, String pathPrefix, FailureListener failureListener, NodeSelector nodeSelector) { this.client = client; this.maxRetryTimeoutMillis = maxRetryTimeoutMillis; this.defaultHeaders = Collections.unmodifiableList(Arrays.asList(defaultHeaders)); this.failureListener = failureListener; this.pathPrefix = pathPrefix; + this.nodeSelector = nodeSelector; setNodes(nodes); } @@ -146,7 +148,7 @@ public static RestClientBuilder builder(HttpHost... hosts) { /** * Replaces the hosts with which the client communicates. 
* - * @deprecated prefer {@link setNodes} because it allows you + * @deprecated prefer {@link #setNodes(Collection)} because it allows you * to set metadata for use with {@link NodeSelector}s */ @Deprecated @@ -180,8 +182,8 @@ private static List hostsToNodes(HttpHost[] hosts) { throw new IllegalArgumentException("hosts must not be null nor empty"); } List nodes = new ArrayList<>(hosts.length); - for (int i = 0; i < hosts.length; i++) { - nodes.add(new Node(hosts[i])); + for (HttpHost host : hosts) { + nodes.add(new Node(host)); } return nodes; } @@ -509,7 +511,7 @@ void performRequestAsyncNoCatch(Request request, ResponseListener listener) thro setHeaders(httpRequest, request.getOptions().getHeaders()); FailureTrackingResponseListener failureTrackingResponseListener = new FailureTrackingResponseListener(listener); long startTime = System.nanoTime(); - performRequestAsync(startTime, nextNode(request.getOptions().getNodeSelector()), httpRequest, ignoreErrorCodes, + performRequestAsync(startTime, nextNode(), httpRequest, ignoreErrorCodes, request.getOptions().getHttpAsyncResponseConsumerFactory(), failureTrackingResponseListener); } @@ -611,7 +613,7 @@ private void setHeaders(HttpRequest httpRequest, Collection
    requestHeade * that is closest to being revived. * @throws IOException if no nodes are available */ - private NodeTuple> nextNode(NodeSelector nodeSelector) throws IOException { + private NodeTuple> nextNode() throws IOException { NodeTuple> nodeTuple = this.nodeTuple; List hosts = selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector); return new NodeTuple<>(hosts.iterator(), nodeTuple.authCache); diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java index 17d27248dfea9..fb61f4f17c483 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java @@ -55,6 +55,7 @@ public final class RestClientBuilder { private HttpClientConfigCallback httpClientConfigCallback; private RequestConfigCallback requestConfigCallback; private String pathPrefix; + private NodeSelector nodeSelector = NodeSelector.ANY; /** * Creates a new builder instance and sets the hosts that the client will send requests to. @@ -173,6 +174,16 @@ public RestClientBuilder setPathPrefix(String pathPrefix) { return this; } + /** + * Sets the {@link NodeSelector} to be used for all requests. + * @throws NullPointerException if the provided nodeSelector is null + */ + public RestClientBuilder setNodeSelector(NodeSelector nodeSelector) { + Objects.requireNonNull(nodeSelector, "nodeSelector must not be null"); + this.nodeSelector = nodeSelector; + return this; + } + /** * Creates a new {@link RestClient} based on the provided configuration. */ @@ -186,7 +197,8 @@ public CloseableHttpAsyncClient run() { return createHttpClient(); } }); - RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, nodes, pathPrefix, failureListener); + RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, nodes, + pathPrefix, failureListener, nodeSelector); httpClient.start(); return restClient; } diff --git a/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java b/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java index 868ccdcab757d..83027db325b0b 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java @@ -59,7 +59,7 @@ public void testNotMasterOnly() { Collections.shuffle(nodes, getRandom()); List expected = new ArrayList<>(nodes); expected.remove(masterOnly); - NodeSelector.NOT_MASTER_ONLY.select(nodes); + NodeSelector.SKIP_DEDICATED_MASTERS.select(nodes); assertEquals(expected, nodes); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java index a78be6c126bae..19106792228d9 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java @@ -114,10 +114,6 @@ static RequestOptions.Builder randomBuilder() { } } - if (randomBoolean()) { - builder.setNodeSelector(mock(NodeSelector.class)); - } - if (randomBoolean()) { builder.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(1)); } @@ -131,15 +127,12 @@ private static RequestOptions copy(RequestOptions options) { private static RequestOptions mutate(RequestOptions options) { RequestOptions.Builder mutant = options.toBuilder(); - int 
mutationType = between(0, 2); + int mutationType = between(0, 1); switch (mutationType) { case 0: mutant.addHeader("extra", "m"); return mutant.build(); case 1: - mutant.setNodeSelector(mock(NodeSelector.class)); - return mutant.build(); - case 2: mutant.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(5)); return mutant.build(); default: diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java index 7f5915fe3529d..272859e8441e3 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java @@ -75,14 +75,15 @@ public static void startHttpServer() throws Exception { httpServers[i] = httpServer; httpHosts[i] = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()); } - restClient = buildRestClient(); + restClient = buildRestClient(NodeSelector.ANY); } - private static RestClient buildRestClient() { + private static RestClient buildRestClient(NodeSelector nodeSelector) { RestClientBuilder restClientBuilder = RestClient.builder(httpHosts); if (pathPrefix.length() > 0) { restClientBuilder.setPathPrefix((randomBoolean() ? "/" : "") + pathPrefixWithoutLeadingSlash); } + restClientBuilder.setNodeSelector(nodeSelector); return restClientBuilder.build(); } @@ -199,29 +200,28 @@ public void onFailure(Exception exception) { * test what happens after calling */ public void testNodeSelector() throws IOException { - Request request = new Request("GET", "/200"); - RequestOptions.Builder options = request.getOptions().toBuilder(); - options.setNodeSelector(firstPositionNodeSelector()); - request.setOptions(options); - int rounds = between(1, 10); - for (int i = 0; i < rounds; i++) { - /* - * Run the request more than once to verify that the - * NodeSelector overrides the round robin behavior. - */ - if (stoppedFirstHost) { - try { - restClient.performRequest(request); - fail("expected to fail to connect"); - } catch (ConnectException e) { - // Windows isn't consistent here. Sometimes the message is even null! - if (false == System.getProperty("os.name").startsWith("Windows")) { - assertEquals("Connection refused", e.getMessage()); + try (RestClient restClient = buildRestClient(firstPositionNodeSelector())) { + Request request = new Request("GET", "/200"); + int rounds = between(1, 10); + for (int i = 0; i < rounds; i++) { + /* + * Run the request more than once to verify that the + * NodeSelector overrides the round robin behavior. + */ + if (stoppedFirstHost) { + try { + restClient.performRequest(request); + fail("expected to fail to connect"); + } catch (ConnectException e) { + // Windows isn't consistent here. Sometimes the message is even null! 
+ if (false == System.getProperty("os.name").startsWith("Windows")) { + assertEquals("Connection refused", e.getMessage()); + } } + } else { + Response response = restClient.performRequest(request); + assertEquals(httpHosts[0], response.getHost()); } - } else { - Response response = restClient.performRequest(request); - assertEquals(httpHosts[0], response.getHost()); } } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java index d04b3cbb7554e..e1062076a0dbf 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java @@ -35,9 +35,7 @@ import org.apache.http.message.BasicStatusLine; import org.apache.http.nio.protocol.HttpAsyncRequestProducer; import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; -import org.elasticsearch.client.Node.Roles; import org.junit.After; -import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -74,13 +72,11 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { private ExecutorService exec = Executors.newFixedThreadPool(1); - private RestClient restClient; private List nodes; private HostsTrackingFailureListener failureListener; - @Before @SuppressWarnings("unchecked") - public void createRestClient() throws IOException { + public RestClient createRestClient(NodeSelector nodeSelector) { CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); when(httpClient.execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class), any(HttpClientContext.class), any(FutureCallback.class))).thenAnswer(new Answer>() { @@ -119,7 +115,7 @@ public void run() { } nodes = Collections.unmodifiableList(nodes); failureListener = new HostsTrackingFailureListener(); - restClient = new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener); + return new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener, nodeSelector); } /** @@ -131,12 +127,13 @@ public void shutdownExec() { } public void testRoundRobinOkStatusCodes() throws IOException { + RestClient restClient = createRestClient(NodeSelector.ANY); int numIters = RandomNumbers.randomIntBetween(getRandom(), 1, 5); for (int i = 0; i < numIters; i++) { Set hostsSet = hostsSet(); for (int j = 0; j < nodes.size(); j++) { int statusCode = randomOkStatusCode(getRandom()); - Response response = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode); + Response response = restClient.performRequest(new Request(randomHttpMethod(getRandom()), "/" + statusCode)); assertEquals(statusCode, response.getStatusLine().getStatusCode()); assertTrue("host not found: " + response.getHost(), hostsSet.remove(response.getHost())); } @@ -146,6 +143,7 @@ public void testRoundRobinOkStatusCodes() throws IOException { } public void testRoundRobinNoRetryErrors() throws IOException { + RestClient restClient = createRestClient(NodeSelector.ANY); int numIters = RandomNumbers.randomIntBetween(getRandom(), 1, 5); for (int i = 0; i < numIters; i++) { Set hostsSet = hostsSet(); @@ -153,7 +151,7 @@ public void testRoundRobinNoRetryErrors() throws IOException { String method = randomHttpMethod(getRandom()); int statusCode = randomErrorNoRetryStatusCode(getRandom()); try { - Response response = restClient.performRequest(method, "/" + 
statusCode); + Response response = restClient.performRequest(new Request(method, "/" + statusCode)); if (method.equals("HEAD") && statusCode == 404) { //no exception gets thrown although we got a 404 assertEquals(404, response.getStatusLine().getStatusCode()); @@ -178,9 +176,10 @@ public void testRoundRobinNoRetryErrors() throws IOException { } public void testRoundRobinRetryErrors() throws IOException { + RestClient restClient = createRestClient(NodeSelector.ANY); String retryEndpoint = randomErrorRetryEndpoint(); try { - restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); + restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint)); fail("request should have failed"); } catch (ResponseException e) { /* @@ -237,7 +236,7 @@ public void testRoundRobinRetryErrors() throws IOException { for (int j = 0; j < nodes.size(); j++) { retryEndpoint = randomErrorRetryEndpoint(); try { - restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); + restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint)); fail("request should have failed"); } catch (ResponseException e) { Response response = e.getResponse(); @@ -269,7 +268,7 @@ public void testRoundRobinRetryErrors() throws IOException { int statusCode = randomErrorNoRetryStatusCode(getRandom()); Response response; try { - response = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode); + response = restClient.performRequest(new Request(randomHttpMethod(getRandom()), "/" + statusCode)); } catch (ResponseException e) { response = e.getResponse(); } @@ -286,7 +285,7 @@ public void testRoundRobinRetryErrors() throws IOException { for (int y = 0; y < i + 1; y++) { retryEndpoint = randomErrorRetryEndpoint(); try { - restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); + restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint)); fail("request should have failed"); } catch (ResponseException e) { Response response = e.getResponse(); @@ -323,6 +322,7 @@ public void select(Iterable restClientNodes) { assertTrue(found); } }; + RestClient restClient = createRestClient(firstPositionOnly); int rounds = between(1, 10); for (int i = 0; i < rounds; i++) { /* @@ -330,18 +330,16 @@ public void select(Iterable restClientNodes) { * NodeSelector overrides the round robin behavior. */ Request request = new Request("GET", "/200"); - RequestOptions.Builder options = request.getOptions().toBuilder(); - options.setNodeSelector(firstPositionOnly); - request.setOptions(options); Response response = restClient.performRequest(request); assertEquals(nodes.get(0).getHost(), response.getHost()); } } public void testSetNodes() throws IOException { + RestClient restClient = createRestClient(NodeSelector.SKIP_DEDICATED_MASTERS); List newNodes = new ArrayList<>(nodes.size()); for (int i = 0; i < nodes.size(); i++) { - Roles roles = i == 0 ? new Roles(false, true, true) : new Roles(true, false, false); + Node.Roles roles = i == 0 ? new Node.Roles(false, true, true) : new Node.Roles(true, false, false); newNodes.add(new Node(nodes.get(i).getHost(), null, null, null, roles, null)); } restClient.setNodes(newNodes); @@ -352,9 +350,6 @@ public void testSetNodes() throws IOException { * NodeSelector overrides the round robin behavior. 
*/ Request request = new Request("GET", "/200"); - RequestOptions.Builder options = request.getOptions().toBuilder(); - options.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); - request.setOptions(options); Response response = restClient.performRequest(request); assertEquals(newNodes.get(0).getHost(), response.getHost()); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 5987fe7dd9849..6b7725666d42d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -150,7 +150,7 @@ public void run() { node = new Node(new HttpHost("localhost", 9200)); failureListener = new HostsTrackingFailureListener(); restClient = new RestClient(httpClient, 10000, defaultHeaders, - singletonList(node), null, failureListener); + singletonList(node), null, failureListener, NodeSelector.ANY); } /** diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java index 04742ccab4f32..030c2fca6272a 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java @@ -54,7 +54,7 @@ public class RestClientTests extends RestClientTestCase { public void testCloseIsIdempotent() throws IOException { List nodes = singletonList(new Node(new HttpHost("localhost", 9200))); CloseableHttpAsyncClient closeableHttpAsyncClient = mock(CloseableHttpAsyncClient.class); - RestClient restClient = new RestClient(closeableHttpAsyncClient, 1_000, new Header[0], nodes, null, null); + RestClient restClient = new RestClient(closeableHttpAsyncClient, 1_000, new Header[0], nodes, null, null, null); restClient.close(); verify(closeableHttpAsyncClient, times(1)).close(); restClient.close(); @@ -475,7 +475,7 @@ private String assertSelectAllRejected( NodeTuple> nodeTuple, private static RestClient createRestClient() { List nodes = Collections.singletonList(new Node(new HttpHost("localhost", 9200))); return new RestClient(mock(CloseableHttpAsyncClient.class), randomLongBetween(1_000, 30_000), - new Header[] {}, nodes, null, null); + new Header[] {}, nodes, null, null, null); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java index d3a0202747d25..d347353a1fb55 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java +++ b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java @@ -36,7 +36,6 @@ import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.ssl.SSLContexts; import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.HasAttributeNodeSelector; import org.elasticsearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory; import org.elasticsearch.client.Node; import org.elasticsearch.client.NodeSelector; @@ -54,6 +53,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.security.KeyStore; +import java.util.Iterator; import java.util.concurrent.CountDownLatch; /** @@ -82,8 +82,7 @@ public class RestClientDocumentation { static { RequestOptions.Builder builder = 
RequestOptions.DEFAULT.toBuilder(); builder.addHeader("Authorization", "Bearer " + TOKEN); // <1> - builder.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); // <2> - builder.setHttpAsyncResponseConsumerFactory( // <3> + builder.setHttpAsyncResponseConsumerFactory( // <2> new HeapBufferedResponseConsumerFactory(30 * 1024 * 1024 * 1024)); COMMON_OPTIONS = builder.build(); } @@ -115,6 +114,45 @@ public void testUsage() throws IOException, InterruptedException { builder.setMaxRetryTimeoutMillis(10000); // <1> //end::rest-client-init-max-retry-timeout } + { + //tag::rest-client-init-node-selector + RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + builder.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); // <1> + //end::rest-client-init-node-selector + } + { + //tag::rest-client-init-allocation-aware-selector + RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + builder.setNodeSelector(new NodeSelector() { // <1> + @Override + public void select(Iterable nodes) { + /* + * Prefer any node that belongs to rack_one. If none is around + * we will go to another rack till it's time to try and revive + * some of the nodes that belong to rack_one. + */ + boolean foundOne = false; + for (Node node : nodes) { + String rackId = node.getAttributes().get("rack_id").get(0); + if ("rack_one".equals(rackId)) { + foundOne = true; + break; + } + } + if (foundOne) { + Iterator nodesIt = nodes.iterator(); + while (nodesIt.hasNext()) { + Node node = nodesIt.next(); + String rackId = node.getAttributes().get("rack_id").get(0); + if ("rack_one".equals(rackId) == false) { + nodesIt.remove(); + } + } + } + } + }); + //end::rest-client-init-allocation-aware-selector + } { //tag::rest-client-init-failure-listener RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); @@ -198,13 +236,6 @@ public void onFailure(Exception exception) { request.setOptions(options); //end::rest-client-options-customize-header } - { - //tag::rest-client-options-customize-attribute - RequestOptions.Builder options = COMMON_OPTIONS.toBuilder(); - options.setNodeSelector(new HasAttributeNodeSelector("rack", "c12")); // <1> - request.setOptions(options); - //end::rest-client-options-customize-attribute - } } { HttpEntity[] documents = new HttpEntity[10]; diff --git a/docs/java-rest/low-level/configuration.asciidoc b/docs/java-rest/low-level/configuration.asciidoc index b0753496558bb..0b58c82724b76 100644 --- a/docs/java-rest/low-level/configuration.asciidoc +++ b/docs/java-rest/low-level/configuration.asciidoc @@ -99,3 +99,30 @@ http://docs.oracle.com/javase/8/docs/technotes/guides/net/properties.html[`netwo to your http://docs.oracle.com/javase/8/docs/technotes/guides/security/PolicyFiles.html[Java security policy]. + +=== Node selector + +The client sends each request to one of the configured nodes in round-robin +fashion. Nodes can optionally be filtered through a node selector that needs +to be provided when initializing the client. This is useful when sniffing is +enabled, for instance to prevent HTTP requests from being sent to dedicated +master nodes. For each request the client runs the configured node selector, +if any, to filter the node candidates, then selects the next one in the list +out of the remaining ones.
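+
+For example, a client that should never send requests to dedicated master
+nodes can be built along the following lines (a minimal sketch; the host and
+port are placeholders):
+
+["source","java"]
+--------------------------------------------------
+RestClientBuilder builder = RestClient.builder(
+        new HttpHost("localhost", 9200, "http"));
+// skip nodes that act only as dedicated masters
+builder.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS);
+RestClient restClient = builder.build();
+--------------------------------------------------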
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-allocation-aware-selector] +-------------------------------------------------- +<1> Set an allocation aware node selector that picks a node in the local rack +if one is available, and otherwise goes to any other node in any rack. It +acts as a preference rather than a strict requirement, given that it goes to +another rack if none of the local nodes are available, rather than returning +no nodes, which would force the client to revive a local node whenever none of +the nodes from the preferred rack is available. + +WARNING: Node selectors that do not consistently select the same set of nodes +will make round-robin behaviour unpredictable and possibly unfair. The +preference example above is fine as it only reasons about node availability, +which already affects the predictability of round-robin. Node selection should +not depend on other external factors or round-robin will not work properly. diff --git a/docs/java-rest/low-level/usage.asciidoc b/docs/java-rest/low-level/usage.asciidoc index 1f8b302715f42..71fadd98988a3 100644 --- a/docs/java-rest/low-level/usage.asciidoc +++ b/docs/java-rest/low-level/usage.asciidoc @@ -196,6 +196,16 @@ include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-failur <1> Set a listener that gets notified every time a node fails, in case actions need to be taken. Used internally when sniffing on failure is enabled. +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-node-selector] +-------------------------------------------------- +<1> Set the node selector used to filter, among the nodes configured on the +client itself, which ones requests may be sent to. This is useful, for +instance, to prevent requests from being sent to dedicated master nodes when +sniffing is enabled. By default the client sends requests to every configured +node. + ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-request-config-callback] @@ -283,8 +293,7 @@ instance and share it between all requests: include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-options-singleton] -------------------------------------------------- <1> Add any headers needed by all requests. -<2> Set a `NodeSelector`. -<3> Customize the response consumer. +<2> Customize the response consumer. `addHeader` is for headers that are required for authorization or to work with a proxy in front of Elasticsearch. There is no need to set the `Content-Type` @@ -315,15 +324,6 @@ adds an extra header: include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-options-customize-header] -------------------------------------------------- -Or you can send requests to nodes with a particular attribute: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-options-customize-attribute] --------------------------------------------------- -<1> Replace the node selector with one that selects nodes on a particular rack.
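+
+With node selectors set at client initialization, an attribute based selector
+can instead be configured once on the builder, for example (a minimal sketch;
+the attribute name and value are placeholders):
+
+["source","java"]
+--------------------------------------------------
+RestClientBuilder builder = RestClient.builder(
+        new HttpHost("localhost", 9200, "http"));
+// only send requests to nodes that expose the node attribute rack=c12
+builder.setNodeSelector(new HasAttributeNodeSelector("rack", "c12"));
+--------------------------------------------------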
- - ==== Multiple parallel asynchronous actions The client is quite happy to execute many actions in parallel. The following diff --git a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 02bc304317e68..a8dd91e8b6de2 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -91,8 +91,9 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion) { + return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion, + restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index df92b101bf1fd..672d19d5dc2a6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -30,7 +30,6 @@ import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; @@ -47,6 +46,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -381,6 +381,11 @@ protected String getProtocol() { protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException { RestClientBuilder builder = RestClient.builder(hosts); + configureClient(builder, settings); + return builder.build(); + } + + protected static void configureClient(RestClientBuilder builder, Settings settings) throws IOException { String keystorePath = settings.get(TRUSTSTORE_PATH); if (keystorePath != null) { final String keystorePass = settings.get(TRUSTSTORE_PASSWORD); @@ -399,11 +404,10 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(keyStore, null).build(); SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(sslcontext); builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy)); - } catch (KeyStoreException|NoSuchAlgorithmException|KeyManagementException|CertificateException e) { + } catch (KeyStoreException |NoSuchAlgorithmException |KeyManagementException |CertificateException e) { throw new RuntimeException("Error setting up ssl", e); } } - try (ThreadContext threadContext = new ThreadContext(settings)) { Header[] defaultHeaders = new Header[threadContext.getHeaders().size()]; int i = 0; @@ -412,7 +416,6 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE 
} builder.setDefaultHeaders(defaultHeaders); } - final String requestTimeoutString = settings.get(CLIENT_RETRY_TIMEOUT); if (requestTimeoutString != null) { final TimeValue maxRetryTimeout = TimeValue.parseTimeValue(requestTimeoutString, CLIENT_RETRY_TIMEOUT); @@ -423,7 +426,6 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE final TimeValue socketTimeout = TimeValue.parseTimeValue(socketTimeoutString, CLIENT_SOCKET_TIMEOUT); builder.setRequestConfigCallback(conf -> conf.setSocketTimeout(Math.toIntExact(socketTimeout.getMillis()))); } - return builder.build(); } @SuppressWarnings("unchecked") diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java index 33443aa5b6e38..ddd5837663521 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java @@ -27,6 +27,8 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import java.io.IOException; @@ -47,8 +49,9 @@ public ClientYamlDocsTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - super(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion, + final CheckedConsumer clientBuilderConsumer) { + super(restSpec, restClient, hosts, esVersion, masterVersion, clientBuilderConsumer); } @Override @@ -66,9 +69,9 @@ public ClientYamlTestResponse callApi(String apiName, Map params request.addParameter(param.getKey(), param.getValue()); } request.setEntity(entity); - setOptions(request, headers, nodeSelector); + setOptions(request, headers); try { - Response response = restClient.performRequest(request); + Response response = getRestClient(nodeSelector).performRequest(request); return new ClientYamlTestResponse(response); } catch (ResponseException e) { throw new ClientYamlTestResponseException(e); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java index 99da661402855..fdc10a1a246e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java @@ -26,18 +26,22 @@ import org.apache.http.util.EntityUtils; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; +import org.elasticsearch.client.Node; import org.elasticsearch.client.NodeSelector; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestPath; import 
org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import java.io.IOException; +import java.io.UncheckedIOException; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; @@ -58,21 +62,24 @@ public class ClientYamlTestClient { private static final ContentType YAML_CONTENT_TYPE = ContentType.create("application/yaml"); private final ClientYamlSuiteRestSpec restSpec; - protected final RestClient restClient; + protected final Map restClients = new HashMap<>(); private final Version esVersion; private final Version masterVersion; + private final CheckedConsumer clientBuilderConsumer; public ClientYamlTestClient( final ClientYamlSuiteRestSpec restSpec, final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { + final Version masterVersion, + final CheckedConsumer clientBuilderConsumer) { assert hosts.size() > 0; this.restSpec = restSpec; - this.restClient = restClient; + this.restClients.put(NodeSelector.ANY, restClient); this.esVersion = esVersion; this.masterVersion = masterVersion; + this.clientBuilderConsumer = clientBuilderConsumer; } public Version getEsVersion() { @@ -172,30 +179,43 @@ public ClientYamlTestResponse callApi(String apiName, Map params requestPath = finalPath.toString(); } - - logger.debug("calling api [{}]", apiName); Request request = new Request(requestMethod, requestPath); for (Map.Entry param : queryStringParams.entrySet()) { request.addParameter(param.getKey(), param.getValue()); } request.setEntity(entity); - setOptions(request, headers, nodeSelector); + setOptions(request, headers); + try { - Response response = restClient.performRequest(request); + Response response = getRestClient(nodeSelector).performRequest(request); return new ClientYamlTestResponse(response); } catch(ResponseException e) { throw new ClientYamlTestResponseException(e); } } - protected static void setOptions(Request request, Map headers, NodeSelector nodeSelector) { + protected RestClient getRestClient(NodeSelector nodeSelector) { + //lazily build a new client in case we need to point to some specific node + return restClients.computeIfAbsent(nodeSelector, selector -> { + RestClient anyClient = restClients.get(NodeSelector.ANY); + RestClientBuilder builder = RestClient.builder(anyClient.getNodes().toArray(new Node[0])); + try { + clientBuilderConsumer.accept(builder); + } catch(IOException e) { + throw new UncheckedIOException(e); + } + builder.setNodeSelector(nodeSelector); + return builder.build(); + }); + } + + protected static void setOptions(Request request, Map headers) { RequestOptions.Builder options = request.getOptions().toBuilder(); for (Map.Entry header : headers.entrySet()) { logger.debug("Adding header {} with value {}", header.getKey(), header.getValue()); options.addHeader(header.getKey(), header.getValue()); } - options.setNodeSelector(nodeSelector); request.setOptions(options); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index c0b5b1e95886c..6afc123520bb0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -47,6 +47,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; 
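For readers following the test-framework change above: ClientYamlTestClient now keeps one low-level REST client per NodeSelector, building each one lazily with the same configuration callback that produced the default client. Below is a minimal self-contained sketch of that caching pattern; Client, ClientBuilder and the string selectors are hypothetical stand-ins for RestClient, RestClientBuilder and NodeSelector, not the real API.

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.HashMap;
import java.util.Map;

public class PerSelectorClientCache {

    /** Mirrors the CheckedConsumer idea used in the patch: a consumer that may throw IOException. */
    @FunctionalInterface
    interface CheckedConsumer<T> {
        void accept(T t) throws IOException;
    }

    /** Hypothetical stand-in for RestClient. */
    static final class Client {
        final String selector;
        Client(String selector) {
            this.selector = selector;
        }
    }

    /** Hypothetical stand-in for RestClientBuilder. */
    static final class ClientBuilder {
        String selector = "ANY";
        Client build() {
            return new Client(selector);
        }
    }

    private final Map<String, Client> clients = new HashMap<>();
    private final CheckedConsumer<ClientBuilder> configureClient;

    PerSelectorClientCache(Client defaultClient, CheckedConsumer<ClientBuilder> configureClient) {
        this.clients.put("ANY", defaultClient);   // the shared client used when no selector is given
        this.configureClient = configureClient;   // re-applies SSL, header and timeout settings
    }

    /** Lazily build, then reuse, a client restricted to the given node selector. */
    Client getClient(String selector) {
        return clients.computeIfAbsent(selector, s -> {
            ClientBuilder builder = new ClientBuilder();
            try {
                configureClient.accept(builder);
            } catch (IOException e) {
                // computeIfAbsent cannot propagate checked exceptions
                throw new UncheckedIOException(e);
            }
            builder.selector = s;
            return builder.build();
        });
    }

    public static void main(String[] args) {
        PerSelectorClientCache cache = new PerSelectorClientCache(new Client("ANY"), builder -> { });
        Client first = cache.getClient("SKIP_DEDICATED_MASTERS");
        Client second = cache.getClient("SKIP_DEDICATED_MASTERS");
        System.out.println(first == second);   // true: built once, then cached
    }
}

The UncheckedIOException wrapper is the same trick the patch uses, since Map.computeIfAbsent does not allow a checked exception to escape.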
import java.util.HashSet; import java.util.List; @@ -122,7 +123,7 @@ public static void initializeUseDefaultNumberOfShards() { public void initAndResetContext() throws Exception { if (restTestExecutionContext == null) { // Sniff host metadata in case we need it in the yaml tests - List nodesWithMetadata = sniffHostMetadata(adminClient()); + List nodesWithMetadata = sniffHostMetadata(); client().setNodes(nodesWithMetadata); adminClient().setNodes(nodesWithMetadata); @@ -163,8 +164,9 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion) { + return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, masterVersion, + restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); } /** @@ -195,8 +197,7 @@ public static Iterable createParameters(NamedXContentRegistry executea } //sort the candidates so they will always be in the same order before being shuffled, for repeatability - Collections.sort(tests, - (o1, o2) -> ((ClientYamlTestCandidate)o1[0]).getTestPath().compareTo(((ClientYamlTestCandidate)o2[0]).getTestPath())); + tests.sort(Comparator.comparing(o -> ((ClientYamlTestCandidate) o[0]).getTestPath())); return tests; } @@ -401,7 +402,7 @@ protected boolean randomizeContentType() { /** * Sniff the cluster for host metadata. */ - private List sniffHostMetadata(RestClient client) throws IOException { + private List sniffHostMetadata() throws IOException { ElasticsearchNodesSniffer.Scheme scheme = ElasticsearchNodesSniffer.Scheme.valueOf(getProtocol().toUpperCase(Locale.ROOT)); ElasticsearchNodesSniffer sniffer = new ElasticsearchNodesSniffer( diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java index 87f2d7f9a53f8..5da8601a9f340 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java @@ -73,7 +73,7 @@ public void testAddingDoWithNodeSelectorWithSkip() { section.setSkipSection(new SkipSection(null, singletonList("node_selector"), null)); DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); ApiCallSection apiCall = new ApiCallSection("test"); - apiCall.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); + apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); doSection.setApiCallSection(apiCall); section.addExecutableSection(doSection); } @@ -84,7 +84,7 @@ public void testAddingDoWithNodeSelectorWithSkipButNotWarnings() { section.setSkipSection(new SkipSection(null, singletonList("yaml"), null)); DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); ApiCallSection apiCall = new ApiCallSection("test"); - apiCall.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); + apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); doSection.setApiCallSection(apiCall); Exception e = expectThrows(IllegalArgumentException.class, () -> section.addExecutableSection(doSection)); assertEquals("Attempted to add a [do] with a [node_selector] section without a corresponding" diff --git 
a/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java b/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java index af9fb45b8a0c8..0196406c478cd 100644 --- a/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java +++ b/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.test.rest.XPackRestIT; import org.junit.After; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -58,8 +57,9 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion) { + return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion, + restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); } /** From 3c42bfad4e68c464ee57420f39e390efd2888761 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 22 Jun 2018 17:24:27 +0200 Subject: [PATCH 78/92] Fix Mockito trying to mock IOException that isn't thrown by method (#31433) (#31527) --- .../xpack/monitoring/exporter/http/HttpExporterTests.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java index ff83621119ef6..a96dc8ebb127a 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java @@ -460,7 +460,6 @@ public void testHttpExporter() throws Exception { } } - @AwaitsFix (bugUrl = "https://github.com/elastic/elasticsearch/issues/31433" ) public void testHttpExporterShutdown() throws Exception { final Config config = createConfig(Settings.EMPTY); final RestClient client = mock(RestClient.class); @@ -469,7 +468,7 @@ public void testHttpExporterShutdown() throws Exception { final MultiHttpResource resource = mock(MultiHttpResource.class); if (sniffer != null && rarely()) { - doThrow(randomFrom(new IOException("expected"), new RuntimeException("expected"))).when(sniffer).close(); + doThrow(new RuntimeException("expected")).when(sniffer).close(); } if (rarely()) { From 7313a987f4a8ba1e39d6105f7d74be9186faa95b Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 22 Jun 2018 17:44:13 +0200 Subject: [PATCH 79/92] fix repository update with the same settings but different type (#31458) fix repository update with the same settings but different type --- .../repositories/RepositoriesService.java | 2 +- .../repositories/RepositoriesServiceIT.java | 96 +++++++++++++++++++ 2 files changed, 97 insertions(+), 1 deletion(-) create mode 100644 server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 636e108468e82..d5b2a6413e9a9 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ 
b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -349,7 +349,7 @@ private boolean registerRepository(RepositoryMetaData repositoryMetaData) throws Repository previous = repositories.get(repositoryMetaData.name()); if (previous != null) { RepositoryMetaData previousMetadata = previous.getMetadata(); - if (!previousMetadata.type().equals(repositoryMetaData.type()) && previousMetadata.settings().equals(repositoryMetaData.settings())) { + if (previousMetadata.equals(repositoryMetaData)) { // Previous version is the same as this one - ignore it return false; } diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java new file mode 100644 index 0000000000000..05c9746aa49ac --- /dev/null +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories; + +import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.mockstore.MockRepository; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; + +import java.util.Collection; +import java.util.Collections; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; + +public class RepositoriesServiceIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Collections.singletonList(MockRepository.Plugin.class); + } + + public void testUpdateRepository() { + final InternalTestCluster cluster = internalCluster(); + + final String repositoryName = "test-repo"; + + final Client client = client(); + final RepositoriesService repositoriesService = + cluster.getDataOrMasterNodeInstances(RepositoriesService.class).iterator().next(); + final Settings settings = cluster.getDefaultSettings(); + + final Settings.Builder repoSettings = Settings.builder().put("location", randomRepoPath()); + + assertAcked(client.admin().cluster().preparePutRepository(repositoryName) + .setType(FsRepository.TYPE) + .setSettings(repoSettings) + .get()); + + final GetRepositoriesResponse originalGetRepositoriesResponse = + 
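The one-line change above is easier to see with the two conditions side by side: the old check skipped re-registration precisely when the settings were unchanged but the type differed, so switching a repository's type while keeping its settings was silently ignored. A minimal sketch follows, with a hypothetical Meta class standing in for RepositoryMetaData.

import java.util.Objects;

public class RepositoryUpdateCheck {

    /** Hypothetical stand-in for RepositoryMetaData: just a type plus its settings. */
    static final class Meta {
        final String type;
        final String settings;
        Meta(String type, String settings) {
            this.type = type;
            this.settings = settings;
        }
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Meta meta = (Meta) o;
            return type.equals(meta.type) && settings.equals(meta.settings);
        }
        @Override
        public int hashCode() {
            return Objects.hash(type, settings);
        }
    }

    /** Old condition: "ignore the update" fires when the type differs but the settings are equal. */
    static boolean ignoredBefore(Meta previous, Meta updated) {
        return !previous.type.equals(updated.type) && previous.settings.equals(updated.settings);
    }

    /** New condition: only a fully identical definition is treated as a no-op. */
    static boolean ignoredAfter(Meta previous, Meta updated) {
        return previous.equals(updated);
    }

    public static void main(String[] args) {
        Meta fs = new Meta("fs", "location=/backups");
        Meta mock = new Meta("mock", "location=/backups");
        System.out.println(ignoredBefore(fs, mock)); // true  -> the type change used to be dropped
        System.out.println(ignoredAfter(fs, mock));  // false -> the repository is now re-registered
    }
}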
client.admin().cluster().prepareGetRepositories(repositoryName).get(); + + assertThat(originalGetRepositoriesResponse.repositories(), hasSize(1)); + RepositoryMetaData originalRepositoryMetaData = originalGetRepositoriesResponse.repositories().get(0); + + assertThat(originalRepositoryMetaData.type(), equalTo(FsRepository.TYPE)); + + final Repository originalRepository = repositoriesService.repository(repositoryName); + assertThat(originalRepository, instanceOf(FsRepository.class)); + + final boolean updated = randomBoolean(); + final String updatedRepositoryType = updated ? "mock" : FsRepository.TYPE; + + assertAcked(client.admin().cluster().preparePutRepository(repositoryName) + .setType(updatedRepositoryType) + .setSettings(repoSettings) + .get()); + + final GetRepositoriesResponse updatedGetRepositoriesResponse = + client.admin().cluster().prepareGetRepositories(repositoryName).get(); + + assertThat(updatedGetRepositoriesResponse.repositories(), hasSize(1)); + final RepositoryMetaData updatedRepositoryMetaData = updatedGetRepositoriesResponse.repositories().get(0); + + assertThat(updatedRepositoryMetaData.type(), equalTo(updatedRepositoryType)); + + final Repository updatedRepository = repositoriesService.repository(repositoryName); + assertThat(updatedRepository, updated ? not(sameInstance(originalRepository)) : sameInstance(originalRepository)); + } +} From f04c579203299bbbb9397609344b59e6f6f8f18f Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 22 Jun 2018 21:08:11 +0200 Subject: [PATCH 80/92] IndexShard should not return null stats (#31528) IndexShard should not return null stats - empty stats or AlreadyCloseException if it's closed is better --- .../stats/TransportClusterStatsAction.java | 17 ++- .../admin/indices/stats/CommonStats.java | 107 +++++++++--------- .../admin/indices/stats/ShardStats.java | 1 + .../stats/TransportIndicesStatsAction.java | 15 ++- .../elasticsearch/index/shard/IndexShard.java | 16 +-- .../elasticsearch/indices/IndicesService.java | 17 ++- .../index/shard/IndexShardTests.java | 34 ++++++ .../test/InternalTestCluster.java | 10 +- 8 files changed, 148 insertions(+), 69 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 2478045787683..227b1359d4f09 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.cluster.stats; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -36,6 +37,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.CommitStats; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.NodeService; @@ -96,13 +99,23 @@ protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeReq for (IndexShard indexShard : indexService) { if (indexShard.routingEntry() != null && 
indexShard.routingEntry().active()) { // only report on fully started shards + CommitStats commitStats; + SeqNoStats seqNoStats; + try { + commitStats = indexShard.commitStats(); + seqNoStats = indexShard.seqNoStats(); + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine + commitStats = null; + seqNoStats = null; + } shardsStats.add( new ShardStats( indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, SHARD_STATS_FLAGS), - indexShard.commitStats(), - indexShard.seqNoStats())); + commitStats, + seqNoStats)); } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java index e244369c0c312..1bf7342be952c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.stats; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -167,57 +168,61 @@ public CommonStats(CommonStatsFlags flags) { public CommonStats(IndicesQueryCache indicesQueryCache, IndexShard indexShard, CommonStatsFlags flags) { CommonStatsFlags.Flag[] setFlags = flags.getFlags(); for (CommonStatsFlags.Flag flag : setFlags) { - switch (flag) { - case Docs: - docs = indexShard.docStats(); - break; - case Store: - store = indexShard.storeStats(); - break; - case Indexing: - indexing = indexShard.indexingStats(flags.types()); - break; - case Get: - get = indexShard.getStats(); - break; - case Search: - search = indexShard.searchStats(flags.groups()); - break; - case Merge: - merge = indexShard.mergeStats(); - break; - case Refresh: - refresh = indexShard.refreshStats(); - break; - case Flush: - flush = indexShard.flushStats(); - break; - case Warmer: - warmer = indexShard.warmerStats(); - break; - case QueryCache: - queryCache = indicesQueryCache.getStats(indexShard.shardId()); - break; - case FieldData: - fieldData = indexShard.fieldDataStats(flags.fieldDataFields()); - break; - case Completion: - completion = indexShard.completionStats(flags.completionDataFields()); - break; - case Segments: - segments = indexShard.segmentStats(flags.includeSegmentFileSizes()); - break; - case Translog: - translog = indexShard.translogStats(); - break; - case RequestCache: - requestCache = indexShard.requestCache().stats(); - break; - case Recovery: - recoveryStats = indexShard.recoveryStats(); - break; - default: - throw new IllegalStateException("Unknown Flag: " + flag); + try { + switch (flag) { + case Docs: + docs = indexShard.docStats(); + break; + case Store: + store = indexShard.storeStats(); + break; + case Indexing: + indexing = indexShard.indexingStats(flags.types()); + break; + case Get: + get = indexShard.getStats(); + break; + case Search: + search = indexShard.searchStats(flags.groups()); + break; + case Merge: + merge = indexShard.mergeStats(); + break; + case Refresh: + refresh = indexShard.refreshStats(); + break; + case Flush: + flush = indexShard.flushStats(); + break; + case Warmer: + warmer = indexShard.warmerStats(); + break; + case QueryCache: + queryCache = indicesQueryCache.getStats(indexShard.shardId()); + break; + case FieldData: + fieldData = 
indexShard.fieldDataStats(flags.fieldDataFields()); + break; + case Completion: + completion = indexShard.completionStats(flags.completionDataFields()); + break; + case Segments: + segments = indexShard.segmentStats(flags.includeSegmentFileSizes()); + break; + case Translog: + translog = indexShard.translogStats(); + break; + case RequestCache: + requestCache = indexShard.requestCache().stats(); + break; + case Recovery: + recoveryStats = indexShard.recoveryStats(); + break; + default: + throw new IllegalStateException("Unknown Flag: " + flag); + } + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java index 8b41c4bf90c99..898f3d69456b0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java @@ -70,6 +70,7 @@ public CommonStats getStats() { return this.commonStats; } + @Nullable public CommitStats getCommitStats() { return this.commitStats; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 9668a1a41fac5..d09aa58938450 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.stats; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; @@ -33,6 +34,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.CommitStats; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.IndicesService; @@ -100,7 +103,17 @@ protected ShardStats shardOperation(IndicesStatsRequest request, ShardRouting sh } CommonStats commonStats = new CommonStats(indicesService.getIndicesQueryCache(), indexShard, request.flags()); + CommitStats commitStats; + SeqNoStats seqNoStats; + try { + commitStats = indexShard.commitStats(); + seqNoStats = indexShard.seqNoStats(); + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine + commitStats = null; + seqNoStats = null; + } return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), commonStats, - indexShard.commitStats(), indexShard.seqNoStats()); + commitStats, seqNoStats); } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index fb987fe035afa..5bd8f9abc6e04 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -868,21 +868,19 @@ public DocsStats docStats() { } /** - * @return {@link CommitStats} if engine is open, otherwise null + * 
@return {@link CommitStats} + * @throws AlreadyClosedException if shard is closed */ - @Nullable public CommitStats commitStats() { - Engine engine = getEngineOrNull(); - return engine == null ? null : engine.commitStats(); + return getEngine().commitStats(); } /** - * @return {@link SeqNoStats} if engine is open, otherwise null + * @return {@link SeqNoStats} + * @throws AlreadyClosedException if shard is closed */ - @Nullable public SeqNoStats seqNoStats() { - Engine engine = getEngineOrNull(); - return engine == null ? null : engine.getSeqNoStats(replicationTracker.getGlobalCheckpoint()); + return getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint()); } public IndexingStats indexingStats(String... types) { @@ -912,8 +910,6 @@ public StoreStats storeStats() { return store.stats(); } catch (IOException e) { throw new ElasticsearchException("io exception while building 'store stats'", e); - } catch (AlreadyClosedException ex) { - return null; // already closed } } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 5141ca5a0c178..4f535f01da4bf 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -79,6 +79,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.cache.request.ShardRequestCache; +import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -91,6 +92,7 @@ import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; @@ -333,13 +335,24 @@ IndexShardStats indexShardStats(final IndicesService indicesService, final Index return null; } + CommitStats commitStats; + SeqNoStats seqNoStats; + try { + commitStats = indexShard.commitStats(); + seqNoStats = indexShard.seqNoStats(); + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine + commitStats = null; + seqNoStats = null; + } + return new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), - indexShard.commitStats(), - indexShard.seqNoStats()) + commitStats, + seqNoStats) }); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 31afb5ed42fc0..ac52378fc6b9d 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -73,6 +73,7 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; import 
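The same guard recurs in every stats collector touched by this patch, so it is worth distilling once: commitStats() and seqNoStats() now throw Lucene's AlreadyClosedException instead of returning null, and callers that can tolerate a concurrently closed shard catch it and report no stats. The sketch below shows only the caller-side pattern; IndexShardLike is a hypothetical stand-in for the IndexShard API, and it assumes lucene-core (which this codebase already depends on) is on the classpath.

import org.apache.lucene.store.AlreadyClosedException;

public class ClosedShardStatsSketch {

    /** Hypothetical stand-in for the two IndexShard getters changed above. */
    interface IndexShardLike {
        Object commitStats();   // throws AlreadyClosedException once the engine is closed
        Object seqNoStats();    // throws AlreadyClosedException once the engine is closed
    }

    /** Returns {commitStats, seqNoStats}, or {null, null} if the shard closed underneath us. */
    static Object[] collect(IndexShardLike shard) {
        Object commitStats;
        Object seqNoStats;
        try {
            commitStats = shard.commitStats();
            seqNoStats = shard.seqNoStats();
        } catch (AlreadyClosedException e) {
            // shard is closed - reporting no stats is fine
            commitStats = null;
            seqNoStats = null;
        }
        return new Object[] { commitStats, seqNoStats };
    }
}

Because AlreadyClosedException is unchecked, the getter signatures do not change; only callers that previously relied on a null return need the try/catch shown in the diffs.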
org.elasticsearch.index.engine.EngineTestCase; @@ -88,6 +89,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; @@ -3082,4 +3084,36 @@ public void onShardInactive(IndexShard indexShard) { closeShards(primary); } + public void testOnCloseStats() throws IOException { + final IndexShard indexShard = newStartedShard(true); + + for (int i = 0; i < 3; i++) { + indexDoc(indexShard, "_doc", "" + i, "{\"foo\" : \"" + randomAlphaOfLength(10) + "\"}"); + indexShard.refresh("test"); // produce segments + } + + // check stats on closed and on opened shard + if (randomBoolean()) { + closeShards(indexShard); + + expectThrows(AlreadyClosedException.class, () -> indexShard.seqNoStats()); + expectThrows(AlreadyClosedException.class, () -> indexShard.commitStats()); + expectThrows(AlreadyClosedException.class, () -> indexShard.storeStats()); + + } else { + final SeqNoStats seqNoStats = indexShard.seqNoStats(); + assertThat(seqNoStats.getLocalCheckpoint(), equalTo(2L)); + + final CommitStats commitStats = indexShard.commitStats(); + assertThat(commitStats.getGeneration(), equalTo(2L)); + + final StoreStats storeStats = indexShard.storeStats(); + + assertThat(storeStats.sizeInBytes(), greaterThan(0L)); + + closeShards(indexShard); + } + + } + } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index efe775f7415c2..51c4f4d1e32f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1111,17 +1111,21 @@ private void assertSameSyncIdSameDocs() { IndicesService indexServices = getInstance(IndicesService.class, nodeAndClient.name); for (IndexService indexService : indexServices) { for (IndexShard indexShard : indexService) { - CommitStats commitStats = indexShard.commitStats(); - if (commitStats != null) { // null if the engine is closed or if the shard is recovering + try { + CommitStats commitStats = indexShard.commitStats(); String syncId = commitStats.getUserData().get(Engine.SYNC_COMMIT_ID); if (syncId != null) { long liveDocsOnShard = commitStats.getNumDocs(); if (docsOnShards.get(syncId) != null) { - assertThat("sync id is equal but number of docs does not match on node " + nodeAndClient.name + ". expected " + docsOnShards.get(syncId) + " but got " + liveDocsOnShard, docsOnShards.get(syncId), equalTo(liveDocsOnShard)); + assertThat("sync id is equal but number of docs does not match on node " + + nodeAndClient.name + ". expected " + docsOnShards.get(syncId) + " but got " + + liveDocsOnShard, docsOnShards.get(syncId), equalTo(liveDocsOnShard)); } else { docsOnShards.put(syncId, liveDocsOnShard); } } + } catch (AlreadyClosedException e) { + // the engine is closed or if the shard is recovering } } } From 7a150ec06d5b846caa89520c1388e9c751a0c8af Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 22 Jun 2018 15:03:01 -0700 Subject: [PATCH 81/92] Core: Combine doExecute methods in TransportAction (#31517) TransportAction currently contains 2 doExecute methods, one which takes a task, and one that does not.
The latter is what some subclasses implement, while the first one just calls the latter, dropping the given task. This commit combines these methods, in favor of just always assuming a task is present. --- .../noop/action/bulk/TransportNoopBulkAction.java | 3 ++- .../action/search/TransportNoopSearchAction.java | 3 ++- .../ingest/common/GrokProcessorGetAction.java | 3 ++- .../TransportMultiSearchTemplateAction.java | 3 ++- .../mustache/TransportSearchTemplateAction.java | 3 ++- .../painless/PainlessExecuteAction.java | 3 ++- .../index/rankeval/TransportRankEvalAction.java | 3 ++- .../index/reindex/TransportDeleteByQueryAction.java | 5 ----- .../index/reindex/TransportReindexAction.java | 5 ----- .../index/reindex/TransportUpdateByQueryAction.java | 5 ----- .../node/tasks/get/TransportGetTaskAction.java | 5 ----- .../cluster/remote/TransportRemoteInfoAction.java | 3 ++- .../indices/flush/TransportSyncedFlushAction.java | 3 ++- .../get/TransportGetFieldMappingsAction.java | 3 ++- .../action/bulk/TransportBulkAction.java | 5 ----- .../action/explain/TransportExplainAction.java | 5 +++-- .../fieldcaps/TransportFieldCapabilitiesAction.java | 4 ++-- .../action/get/TransportMultiGetAction.java | 3 ++- .../ingest/SimulatePipelineTransportAction.java | 3 ++- .../action/main/TransportMainAction.java | 3 ++- .../action/search/TransportClearScrollAction.java | 3 ++- .../action/search/TransportMultiSearchAction.java | 3 ++- .../action/search/TransportSearchAction.java | 5 ----- .../action/search/TransportSearchScrollAction.java | 4 ---- .../action/support/TransportAction.java | 6 +----- .../support/broadcast/TransportBroadcastAction.java | 5 ----- .../node/TransportBroadcastByNodeAction.java | 5 ----- .../support/master/TransportMasterNodeAction.java | 6 ------ .../action/support/nodes/TransportNodesAction.java | 6 ------ .../TransportBroadcastReplicationAction.java | 6 ------ .../replication/TransportReplicationAction.java | 5 ----- .../TransportInstanceSingleOperationAction.java | 2 +- .../single/shard/TransportSingleShardAction.java | 2 +- .../action/support/tasks/TransportTasksAction.java | 6 ------ .../TransportMultiTermVectorsAction.java | 3 ++- .../action/update/TransportUpdateAction.java | 13 +++++++------ .../org/elasticsearch/action/ActionModuleTests.java | 3 ++- .../elasticsearch/action/main/MainActionTests.java | 3 ++- .../action/search/MultiSearchActionTookTests.java | 3 ++- .../support/TransportActionFilterChainTests.java | 4 ++-- .../client/node/NodeClientHeadersTests.java | 3 ++- .../xpack/core/action/TransportXPackInfoAction.java | 3 ++- .../action/TransportGetCertificateInfoAction.java | 3 ++- .../core/action/TransportXPackInfoActionTests.java | 3 ++- .../graph/action/TransportGraphExploreAction.java | 3 ++- .../ml/action/TransportDeleteCalendarAction.java | 3 ++- .../action/TransportDeleteCalendarEventAction.java | 4 +++- .../ml/action/TransportDeleteExpiredDataAction.java | 4 +++- .../ml/action/TransportDeleteFilterAction.java | 3 ++- .../action/TransportDeleteModelSnapshotAction.java | 4 +++- .../xpack/ml/action/TransportGetBucketsAction.java | 3 ++- .../ml/action/TransportGetCalendarEventsAction.java | 3 ++- .../ml/action/TransportGetCalendarsAction.java | 3 ++- .../ml/action/TransportGetCategoriesAction.java | 3 ++- .../xpack/ml/action/TransportGetFiltersAction.java | 3 ++- .../ml/action/TransportGetInfluencersAction.java | 3 ++- .../ml/action/TransportGetModelSnapshotsAction.java | 4 +++- .../ml/action/TransportGetOverallBucketsAction.java | 4 +++- 
.../xpack/ml/action/TransportGetRecordsAction.java | 3 ++- .../xpack/ml/action/TransportMlInfoAction.java | 3 ++- .../action/TransportPostCalendarEventsAction.java | 3 ++- .../ml/action/TransportPreviewDatafeedAction.java | 3 ++- .../xpack/ml/action/TransportPutCalendarAction.java | 3 ++- .../xpack/ml/action/TransportPutFilterAction.java | 3 ++- .../ml/action/TransportUpdateCalendarJobAction.java | 3 ++- .../ml/action/TransportUpdateFilterAction.java | 3 ++- .../action/TransportUpdateModelSnapshotAction.java | 4 +++- .../ml/action/TransportValidateDetectorAction.java | 3 ++- .../ml/action/TransportValidateJobConfigAction.java | 4 +++- .../action/TransportMonitoringBulkAction.java | 3 ++- .../rollup/action/TransportGetRollupCapsAction.java | 3 ++- .../rollup/action/TransportRollupSearchAction.java | 2 +- .../action/role/TransportDeleteRoleAction.java | 3 ++- .../action/role/TransportGetRolesAction.java | 3 ++- .../action/role/TransportPutRoleAction.java | 3 ++- .../TransportDeleteRoleMappingAction.java | 4 ++-- .../rolemapping/TransportGetRoleMappingsAction.java | 4 ++-- .../rolemapping/TransportPutRoleMappingAction.java | 4 ++-- .../saml/TransportSamlAuthenticateAction.java | 4 ++-- .../saml/TransportSamlInvalidateSessionAction.java | 4 ++-- .../action/saml/TransportSamlLogoutAction.java | 4 ++-- .../TransportSamlPrepareAuthenticationAction.java | 5 +++-- .../action/token/TransportCreateTokenAction.java | 3 ++- .../token/TransportInvalidateTokenAction.java | 3 ++- .../action/token/TransportRefreshTokenAction.java | 3 ++- .../action/user/TransportAuthenticateAction.java | 3 ++- .../action/user/TransportChangePasswordAction.java | 3 ++- .../action/user/TransportDeleteUserAction.java | 3 ++- .../action/user/TransportGetUsersAction.java | 3 ++- .../action/user/TransportHasPrivilegesAction.java | 3 ++- .../action/user/TransportPutUserAction.java | 3 ++- .../action/user/TransportSetEnabledAction.java | 3 ++- .../action/role/TransportDeleteRoleActionTests.java | 7 ++++--- .../action/role/TransportGetRolesActionTests.java | 9 +++++---- .../action/role/TransportPutRoleActionTests.java | 7 ++++--- .../TransportGetRoleMappingsActionTests.java | 7 ++++--- .../TransportPutRoleMappingActionTests.java | 3 ++- .../TransportSamlInvalidateSessionActionTests.java | 3 ++- .../action/saml/TransportSamlLogoutActionTests.java | 3 ++- .../user/TransportAuthenticateActionTests.java | 7 ++++--- .../user/TransportChangePasswordActionTests.java | 9 +++++---- .../action/user/TransportDeleteUserActionTests.java | 11 ++++++----- .../action/user/TransportGetUsersActionTests.java | 13 +++++++------ .../user/TransportHasPrivilegesActionTests.java | 9 +++++---- .../action/user/TransportPutUserActionTests.java | 11 ++++++----- .../action/user/TransportSetEnabledActionTests.java | 11 ++++++----- .../sql/plugin/TransportSqlClearCursorAction.java | 3 ++- .../xpack/sql/plugin/TransportSqlQueryAction.java | 3 ++- .../sql/plugin/TransportSqlTranslateAction.java | 3 ++- .../transport/actions/WatcherTransportAction.java | 6 ++++-- .../actions/delete/TransportDeleteWatchAction.java | 3 ++- 111 files changed, 240 insertions(+), 224 deletions(-) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java index 0f6748b5e826c..b9520e667be67 100644 --- 
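Before the individual diffs, here is the shape of the change in one place: subclasses used to override doExecute(Request, ActionListener) while the Task-accepting overload dropped the task; now there is a single doExecute that always receives the task. A minimal self-contained sketch follows, using hypothetical stand-ins (Task, Request, Response, ActionListener, HandledAction) rather than the real TransportAction hierarchy.

public class CombinedDoExecuteSketch {

    // Hypothetical stand-ins for the Elasticsearch action framework types.
    interface ActionListener<R> {
        void onResponse(R response);
        void onFailure(Exception e);
    }
    static class Task { }
    static class Request { }
    static class Response { }

    /** Stand-in for the base action after the change: one doExecute, task always present. */
    abstract static class HandledAction<Req, Resp> {
        final void execute(Task task, Req request, ActionListener<Resp> listener) {
            doExecute(task, request, listener);   // no task-less overload left to fall through to
        }
        protected abstract void doExecute(Task task, Req request, ActionListener<Resp> listener);
    }

    /** Roughly what a subclass such as a noop action looks like after the change. */
    static class NoopAction extends HandledAction<Request, Response> {
        @Override
        protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
            // the task is always non-null here, so it can be tracked or cancelled without extra overrides
            listener.onResponse(new Response());
        }
    }

    public static void main(String[] args) {
        new NoopAction().execute(new Task(), new Request(), new ActionListener<Response>() {
            @Override
            public void onResponse(Response response) {
                System.out.println("executed with task context");
            }
            @Override
            public void onFailure(Exception e) {
                e.printStackTrace();
            }
        });
    }
}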
a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; public class TransportNoopBulkAction extends HandledTransportAction { @@ -42,7 +43,7 @@ public TransportNoopBulkAction(Settings settings, TransportService transportServ } @Override - protected void doExecute(BulkRequest request, ActionListener listener) { + protected void doExecute(Task task, BulkRequest request, ActionListener listener) { final int itemCount = request.requests().size(); // simulate at least a realistic amount of data that gets serialized BulkItemResponse[] bulkItemResponses = new BulkItemResponse[itemCount]; diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java index 040c2d5f52d1d..099b5a3a8b07c 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileShardResults; import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.Collections; @@ -44,7 +45,7 @@ public TransportNoopSearchAction(Settings settings, TransportService transportSe } @Override - protected void doExecute(SearchRequest request, ActionListener listener) { + protected void doExecute(Task task, SearchRequest request, ActionListener listener) { listener.onResponse(new SearchResponse(new InternalSearchResponse( new SearchHits( new SearchHit[0], 0L, 0.0f), diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 85a8f5e48079c..2fae5d77bcce3 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -41,6 +41,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -118,7 +119,7 @@ public TransportAction(Settings settings, TransportService transportService, Act } @Override - protected void doExecute(Request request, ActionListener listener) { + protected void doExecute(Task task, Request request, ActionListener listener) { try { listener.onResponse(new Response(GROK_PATTERNS)); } catch (Exception e) { diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java 
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java index 79fea3c6d62cd..6e0baed9be879 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; @@ -54,7 +55,7 @@ public TransportMultiSearchTemplateAction(Settings settings, TransportService tr } @Override - protected void doExecute(MultiSearchTemplateRequest request, ActionListener listener) { + protected void doExecute(Task task, MultiSearchTemplateRequest request, ActionListener listener) { List originalSlots = new ArrayList<>(); MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); multiSearchRequest.indicesOptions(request.indicesOptions()); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index 45de41f51a3d2..2f880b56dc005 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -63,7 +64,7 @@ public TransportSearchTemplateAction(Settings settings, TransportService transpo } @Override - protected void doExecute(SearchTemplateRequest request, ActionListener listener) { + protected void doExecute(Task task, SearchTemplateRequest request, ActionListener listener) { final SearchTemplateResponse response = new SearchTemplateResponse(); try { SearchRequest searchRequest = convert(request, response, scriptService, xContentRegistry); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 1bfd013b0d5a5..01139f6cf2e70 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -48,6 +48,7 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -285,7 +286,7 @@ public TransportAction(Settings settings, TransportService transportService, this.scriptService = scriptService; } @Override - protected void doExecute(Request request, ActionListener listener) { + protected void doExecute(Task task, Request request, ActionListener listener) { switch (request.context) { case PAINLESS_TEST: PainlessTestScript.Factory factory = scriptService.compile(request.script, PainlessTestScript.CONTEXT); diff 
--git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java index 81e9b5cf42c4c..80d3d674aed3b 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java @@ -40,6 +40,7 @@ import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -83,7 +84,7 @@ public TransportRankEvalAction(Settings settings, ActionFilters actionFilters, C } @Override - protected void doExecute(RankEvalRequest request, ActionListener listener) { + protected void doExecute(Task task, RankEvalRequest request, ActionListener listener) { RankEvalSpec evaluationSpecification = request.getRankEvalSpec(); EvaluationMetric metric = evaluationSpecification.getMetric(); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index 9be54f4f76104..c1defe56adc6f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -67,9 +67,4 @@ public void doExecute(Task task, DeleteByQueryRequest request, ActionListener listener) { - throw new UnsupportedOperationException("task required"); - } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 62be1e2cb613a..e54b5f50ae674 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -134,11 +134,6 @@ protected void doExecute(Task task, ReindexRequest request, ActionListener listener) { - throw new UnsupportedOperationException("task required"); - } - static void checkRemoteWhitelist(CharacterRunAutomaton whitelist, RemoteInfo remoteInfo) { if (remoteInfo == null) { return; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index c497374d944e8..34ae3fdd0c62f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -78,11 +78,6 @@ protected void doExecute(Task task, UpdateByQueryRequest request, ActionListener ); } - @Override - protected void doExecute(UpdateByQueryRequest request, ActionListener listener) { - throw new UnsupportedOperationException("task required"); - } - /** * Simple implementation of update-by-query using scrolling and bulk. 
*/ diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java index 9e841b97e7e07..927d2e47680c5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java @@ -81,11 +81,6 @@ public TransportGetTaskAction(Settings settings, ThreadPool threadPool, Transpor this.xContentRegistry = xContentRegistry; } - @Override - protected void doExecute(GetTaskRequest request, ActionListener listener) { - throw new UnsupportedOperationException("Task is required"); - } - @Override protected void doExecute(Task thisTask, GetTaskRequest request, ActionListener listener) { if (clusterService.localNode().getId().equals(request.getTaskId().getNodeId())) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java index edf8eae187345..743a35998355c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java @@ -22,6 +22,7 @@ import java.util.function.Supplier; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.support.ActionFilters; @@ -45,7 +46,7 @@ public TransportRemoteInfoAction(Settings settings, TransportService transportSe } @Override - protected void doExecute(RemoteInfoRequest remoteInfoRequest, ActionListener listener) { + protected void doExecute(Task task, RemoteInfoRequest remoteInfoRequest, ActionListener listener) { listener.onResponse(new RemoteInfoResponse(remoteClusterService.getRemoteConnectionInfos().collect(toList()))); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java index 9762fe6cbb814..1ab46bfd926c6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.flush.SyncedFlushService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; /** @@ -45,7 +46,7 @@ public TransportSyncedFlushAction(Settings settings, TransportService transportS } @Override - protected void doExecute(SyncedFlushRequest request, ActionListener listener) { + protected void doExecute(Task task, SyncedFlushRequest request, ActionListener listener) { syncedFlushService.attemptSyncedFlush(request.indices(), request.indicesOptions(), listener); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java index bf61fc5e8633f..cf2ba48dc8771 
100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -53,7 +54,7 @@ public TransportGetFieldMappingsAction(Settings settings, TransportService trans } @Override - protected void doExecute(GetFieldMappingsRequest request, final ActionListener listener) { + protected void doExecute(Task task, GetFieldMappingsRequest request, final ActionListener listener) { ClusterState clusterState = clusterService.state(); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); final AtomicInteger indexCounter = new AtomicInteger(); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 3ac75eb5869d7..247970dafcee3 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -123,11 +123,6 @@ public TransportBulkAction(Settings settings, ThreadPool threadPool, TransportSe clusterService.addStateApplier(this.ingestForwarder); } - @Override - protected final void doExecute(final BulkRequest bulkRequest, final ActionListener listener) { - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener listener) { if (bulkRequest.hasIndexRequestsWithPipelines()) { diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 18c1ea41e95b9..5ea178f595acf 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -44,6 +44,7 @@ import org.elasticsearch.search.internal.ShardSearchLocalRequest; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.rescore.Rescorer; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -67,9 +68,9 @@ public TransportExplainAction(Settings settings, ThreadPool threadPool, ClusterS } @Override - protected void doExecute(ExplainRequest request, ActionListener listener) { + protected void doExecute(Task task, ExplainRequest request, ActionListener listener) { request.nowInMillis = System.currentTimeMillis(); - super.doExecute(request, listener); + super.doExecute(task, request, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index f86d0f1d273f0..ef0d19a265583 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -31,6 +31,7 @@ import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteClusterService; @@ -63,8 +64,7 @@ public TransportFieldCapabilitiesAction(Settings settings, TransportService tran } @Override - protected void doExecute(FieldCapabilitiesRequest request, - final ActionListener listener) { + protected void doExecute(Task task, FieldCapabilitiesRequest request, final ActionListener listener) { final ClusterState clusterState = clusterService.state(); final Map remoteClusterIndices = remoteClusterService.groupIndices(request.indicesOptions(), request.indices(), idx -> indexNameExpressionResolver.hasIndexOrAlias(idx, clusterState)); diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java index f7ad0f6c87fd0..d7770148c95a9 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -53,7 +54,7 @@ public TransportMultiGetAction(Settings settings, TransportService transportServ } @Override - protected void doExecute(final MultiGetRequest request, final ActionListener listener) { + protected void doExecute(Task task, final MultiGetRequest request, final ActionListener listener) { ClusterState clusterState = clusterService.state(); clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java index 599d3a3d60f5a..2e898c1895f9a 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.node.NodeService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -48,7 +49,7 @@ public SimulatePipelineTransportAction(Settings settings, ThreadPool threadPool, } @Override - protected void doExecute(SimulatePipelineRequest request, ActionListener listener) { + protected void doExecute(Task task, SimulatePipelineRequest request, ActionListener listener) { final Map source = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2(); final SimulatePipelineRequest.Parsed simulateRequest; diff --git a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java index 48612a68901dd..d3a54bf7e45ba 100644 --- a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; public class TransportMainAction extends HandledTransportAction { @@ -44,7 +45,7 @@ public TransportMainAction(Settings settings, TransportService transportService, } @Override - protected void doExecute(MainRequest request, ActionListener listener) { + protected void doExecute(Task task, MainRequest request, ActionListener listener) { ClusterState clusterState = clusterService.state(); assert Node.NODE_NAME_SETTING.exists(settings); final boolean available = clusterState.getBlocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE) == false; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java index fe5ab3d9a379c..f1c9fd5c545fb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; public class TransportClearScrollAction extends HandledTransportAction { @@ -43,7 +44,7 @@ public TransportClearScrollAction(Settings settings, TransportService transportS } @Override - protected void doExecute(ClearScrollRequest request, final ActionListener listener) { + protected void doExecute(Task task, ClearScrollRequest request, final ActionListener listener) { Runnable runnable = new ClearScrollController(request, listener, clusterService.state().nodes(), logger, searchTransportService); runnable.run(); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index df3214af8ea69..b771a135d5f29 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -70,7 +71,7 @@ public TransportMultiSearchAction(Settings settings, ThreadPool threadPool, Tran } @Override - protected void doExecute(MultiSearchRequest request, ActionListener listener) { + protected void doExecute(Task task, MultiSearchRequest request, ActionListener listener) { final long relativeStartTime = relativeTimeProvider.getAsLong(); ClusterState clusterState = clusterService.state(); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 1b9a8353253d1..5c0b2eb39ed51 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
@@ -362,11 +362,6 @@ static GroupShardsIterator mergeShardsIterators(GroupShards
         return new GroupShardsIterator<>(shards);
     }
-    @Override
-    protected final void doExecute(SearchRequest searchRequest, ActionListener listener) {
-        throw new UnsupportedOperationException("the task parameter is required");
-    }
-
     private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest,
                                                         GroupShardsIterator shardIterators,
                                                         SearchTimeProvider timeProvider,
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java
index 953152eaad003..70a50d44fb0e6 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java
@@ -50,10 +50,6 @@ public TransportSearchScrollAction(Settings settings, TransportService transport
         this.searchPhaseController = searchPhaseController;
     }
-    @Override
-    protected final void doExecute(SearchScrollRequest request, ActionListener listener) {
-        throw new UnsupportedOperationException("the task parameter is required");
-    }
     @Override
     protected void doExecute(Task task, SearchScrollRequest request, ActionListener listener) {
         try {
diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
index 85167cfe0f8e9..9db5bfd84b5e3 100644
--- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
@@ -123,11 +123,7 @@ public final void execute(Task task, Request request, ActionListener l
         requestFilterChain.proceed(task, actionName, request, listener);
     }
-    protected void doExecute(Task task, Request request, ActionListener listener) {
-        doExecute(request, listener);
-    }
-
-    protected abstract void doExecute(Request request, ActionListener listener);
+    protected abstract void doExecute(Task task, Request request, ActionListener listener);
     private static class RequestFilterChain implements ActionFilterChain {
diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
index 1bec46fd1213e..45a65a31390e6 100644
--- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
@@ -75,11 +75,6 @@ protected void doExecute(Task task, Request request, ActionListener li
         new AsyncBroadcastAction(task, request, listener).start();
     }
-    @Override
-    protected final void doExecute(Request request, ActionListener listener) {
-        throw new UnsupportedOperationException("the task parameter is required for this operation");
-    }
-
     protected abstract Response newResponse(Request request, AtomicReferenceArray shardsResponses, ClusterState clusterState);
     protected abstract ShardRequest newShardRequest(int numShards, ShardRouting shard, Request request);
diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index
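/*
 * [Editorial sketch, not part of the patch] With the request-only overload removed above,
 * TransportAction declares a single abstract doExecute(Task, Request, ActionListener), so
 * subclasses no longer need the UnsupportedOperationException stubs this series deletes.
 * The class below is a minimal illustration in the style of the test fixtures later in this
 * patch (ActionModuleTests / TransportActionFilterChainTests); FakeTransportAction,
 * FakeRequest, FakeResponse and the "internal:fake" action name are placeholders, not real
 * API, and the constructor shape mirrors the (settings, name, filters, task manager) form
 * used by those tests.
 */
import java.util.Collections;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.threadpool.ThreadPool;

class FakeTransportAction extends TransportAction<FakeTransportAction.FakeRequest, FakeTransportAction.FakeResponse> {

    FakeTransportAction(Settings settings, ThreadPool threadPool) {
        super(settings, "internal:fake", new ActionFilters(Collections.emptySet()),
                new TaskManager(settings, threadPool, Collections.emptySet()));
    }

    @Override
    protected void doExecute(Task task, FakeRequest request, ActionListener<FakeResponse> listener) {
        // the task handle is now always supplied by execute(Task, ...); handlers that
        // do not need it can simply ignore the parameter
        listener.onResponse(new FakeResponse());
    }

    static class FakeRequest extends ActionRequest {
        @Override
        public ActionRequestValidationException validate() {
            return null;
        }
    }

    static class FakeResponse extends ActionResponse {
    }
}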
348162b8c33bd..9079238b7b62e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -221,11 +221,6 @@ private Response newResponse( */ protected abstract ClusterBlockException checkRequestBlock(ClusterState state, Request request, String[] concreteIndices); - @Override - protected final void doExecute(Request request, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, Request request, ActionListener listener) { new AsyncAction(task, request, listener).start(); diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 8f198c4b82e6f..934241a8fcb58 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -115,12 +115,6 @@ protected boolean localExecute(Request request) { protected abstract ClusterBlockException checkBlock(Request request, ClusterState state); - @Override - protected final void doExecute(final Request request, ActionListener listener) { - logger.warn("attempt to execute a master node operation without task"); - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, final Request request, ActionListener listener) { new AsyncSingleAction(task, request, listener).start(); diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 6a9ac53f7bebd..b232d849223b9 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -77,12 +77,6 @@ protected TransportNodesAction(Settings settings, String actionName, ThreadPool transportNodeAction, nodeRequest, nodeExecutor, new NodeTransportHandler()); } - @Override - protected final void doExecute(NodesRequest request, ActionListener listener) { - logger.warn("attempt to execute a transport nodes operation without a task"); - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, NodesRequest request, ActionListener listener) { new AsyncAction(task, request, listener).start(); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index 50e0cc3af7f7b..aa3784efdd04f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -66,12 +66,6 @@ public TransportBroadcastReplicationAction(String name, Supplier reques this.indexNameExpressionResolver = indexNameExpressionResolver; } - - @Override - protected final void doExecute(final Request request, final ActionListener listener) { - 
throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, Request request, ActionListener listener) { final ClusterState clusterState = clusterService.state(); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index c31ee81a802a5..53d9752f4edc6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -163,11 +163,6 @@ protected void registerRequestHandlers(String actionName, TransportService trans new ReplicaOperationTransportHandler()); } - @Override - protected final void doExecute(Request request, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, Request request, ActionListener listener) { new ReroutePhase((ReplicationTask) task, request, listener).run(); diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index 2d8ccb6e524f4..e8e710aa81f2c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -74,7 +74,7 @@ protected TransportInstanceSingleOperationAction(Settings settings, String actio } @Override - protected void doExecute(Request request, ActionListener listener) { + protected void doExecute(Task task, Request request, ActionListener listener) { new AsyncSingleAction(request, listener).start(); } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index 7116061640f3e..7a83b0c455da4 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -97,7 +97,7 @@ protected boolean isSubAction() { } @Override - protected void doExecute(Request request, ActionListener listener) { + protected void doExecute(Task task, Request request, ActionListener listener) { new AsyncSingleAction(request, listener).start(); } diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index ee116d9f957c6..38a0d96600ce8 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -90,12 +90,6 @@ protected TransportTasksAction(Settings settings, String actionName, ClusterServ transportService.registerRequestHandler(transportNodeAction, NodeTaskRequest::new, nodeExecutor, new NodeTransportHandler()); } - @Override - protected final void doExecute(TasksRequest request, ActionListener listener) { - logger.warn("attempt to execute a 
transport tasks operation without a task"); - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, TasksRequest request, ActionListener listener) { new AsyncAction(task, request, listener).start(); diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index f66d843ea6db4..b7ee052b2ba82 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -54,7 +55,7 @@ public TransportMultiTermVectorsAction(Settings settings, TransportService trans } @Override - protected void doExecute(final MultiTermVectorsRequest request, final ActionListener listener) { + protected void doExecute(Task task, final MultiTermVectorsRequest request, final ActionListener listener) { ClusterState clusterState = clusterService.state(); clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ); diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 9faf22d464cbb..299a2ce812396 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -53,6 +53,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -111,13 +112,13 @@ public static void resolveAndValidateRouting(MetaData metaData, String concreteI } @Override - protected void doExecute(final UpdateRequest request, final ActionListener listener) { + protected void doExecute(Task task, final UpdateRequest request, final ActionListener listener) { // if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) { client.admin().indices().create(new CreateIndexRequest().index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener() { @Override public void onResponse(CreateIndexResponse result) { - innerExecute(request, listener); + innerExecute(task, request, listener); } @Override @@ -125,7 +126,7 @@ public void onFailure(Exception e) { if (unwrapCause(e) instanceof ResourceAlreadyExistsException) { // we have the index, do it try { - innerExecute(request, listener); + innerExecute(task, request, listener); } catch (Exception inner) { inner.addSuppressed(e); listener.onFailure(inner); @@ -136,12 +137,12 @@ public void onFailure(Exception e) { } }); } else { - innerExecute(request, listener); + innerExecute(task, request, listener); } } - private void innerExecute(final UpdateRequest request, final ActionListener listener) { - 
super.doExecute(request, listener); + private void innerExecute(final Task task, final UpdateRequest request, final ActionListener listener) { + super.doExecute(task, request, listener); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 1767358eca8e4..1fa4197e74900 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest.Method; import org.elasticsearch.rest.action.RestMainAction; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -84,7 +85,7 @@ protected FakeTransportAction(Settings settings, String actionName, ActionFilter } @Override - protected void doExecute(FakeRequest request, ActionListener listener) { + protected void doExecute(Task task, FakeRequest request, ActionListener listener) { } } class FakeAction extends Action { diff --git a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java index 654a4a3649c35..2c2694116b216 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; @@ -69,7 +70,7 @@ public void testMainActionClusterAvailable() { x -> null, null, Collections.emptySet()); TransportMainAction action = new TransportMainAction(settings, transportService, mock(ActionFilters.class), clusterService); AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new MainRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new MainRequest(), new ActionListener() { @Override public void onResponse(MainResponse mainResponse) { responseRef.set(mainResponse); diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index 94bc6b01ec168..fc3fb34a6cb19 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -106,7 +107,7 @@ private void runTestTook(boolean controlledClock) throws Exception { TransportMultiSearchAction action = createTransportMultiSearchAction(controlledClock, expected); - action.doExecute(multiSearchRequest, new ActionListener() { + action.doExecute(mock(Task.class), multiSearchRequest, new ActionListener() { @Override public void onResponse(MultiSearchResponse multiSearchResponse) { if (controlledClock) { 
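/*
 * [Editorial sketch, not part of the patch] The test hunks above show the new calling
 * convention: doExecute now requires a Task, which unit tests satisfy with mock(Task.class).
 * A helper in the style of MainActionTests might look like the fragment below; it assumes
 * the action instance built in the test's setup and must live in the test class itself
 * (same package), because doExecute is protected.
 */
import static org.mockito.Mockito.mock;

import java.util.concurrent.atomic.AtomicReference;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.main.MainRequest;
import org.elasticsearch.action.main.MainResponse;
import org.elasticsearch.action.main.TransportMainAction;
import org.elasticsearch.tasks.Task;

static MainResponse executeMain(TransportMainAction action) {
    AtomicReference<MainResponse> responseRef = new AtomicReference<>();
    // the mocked Task stands in for the task that TransportAction.execute(Task, ...)
    // would normally register and pass through
    action.doExecute(mock(Task.class), new MainRequest(), new ActionListener<MainResponse>() {
        @Override
        public void onResponse(MainResponse mainResponse) {
            responseRef.set(mainResponse);
        }

        @Override
        public void onFailure(Exception e) {
            throw new AssertionError("unexpected failure", e);
        }
    });
    return responseRef.get();
}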
diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 479ed2ad60d51..9df73c8c95543 100644 --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -83,7 +83,7 @@ public void testActionFiltersRequest() throws ExecutionException, InterruptedExc new TransportAction(Settings.EMPTY, actionName, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override - protected void doExecute(TestRequest request, ActionListener listener) { + protected void doExecute(Task task, TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); } }; @@ -160,7 +160,7 @@ public void exe TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override - protected void doExecute(TestRequest request, ActionListener listener) { + protected void doExecute(Task task, TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); } }; diff --git a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index f473188a5424b..a689de9a5d324 100644 --- a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.client.AbstractClientHeadersTestCase; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; @@ -63,7 +64,7 @@ private InternalTransportAction(Settings settings, String actionName, ThreadPool } @Override - protected void doExecute(ActionRequest request, ActionListener listener) { + protected void doExecute(Task task, ActionRequest request, ActionListener listener) { listener.onFailure(new InternalException(actionName)); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java index 23dd0e12d44ff..b149fa300832b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.license.XPackInfoResponse; import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; import org.elasticsearch.license.XPackInfoResponse.LicenseInfo; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackBuild; import org.elasticsearch.xpack.core.XPackFeatureSet; @@ -37,7 +38,7 @@ public TransportXPackInfoAction(Settings settings, TransportService transportSer } @Override - protected void doExecute(XPackInfoRequest request, ActionListener listener) { + protected void doExecute(Task task, XPackInfoRequest request, ActionListener listener) { XPackInfoResponse.BuildInfo buildInfo = null; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java index a70d0693d5b37..9337f7f6b0c22 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo; @@ -32,7 +33,7 @@ public TransportGetCertificateInfoAction(Settings settings, TransportService tra } @Override - protected void doExecute(GetCertificateInfoAction.Request request, + protected void doExecute(Task task, GetCertificateInfoAction.Request request, ActionListener listener) { try { Collection certificates = sslService.getLoadedCertificates(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java index e17f7a48cbfeb..01991670d5565 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackInfoResponse; import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackFeatureSet; @@ -82,7 +83,7 @@ public void testDoExecute() throws Exception { final CountDownLatch latch = new CountDownLatch(1); final AtomicReference response = new AtomicReference<>(); final AtomicReference error = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(XPackInfoResponse infoResponse) { response.set(infoResponse); diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index d45dd640a49ff..4eb136040e988 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -35,6 +35,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; @@ -92,7 +93,7 @@ public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, 
Nod } @Override - protected void doExecute(GraphExploreRequest request, ActionListener listener) { + protected void doExecute(Task task, GraphExploreRequest request, ActionListener listener) { if (licenseState.isGraphAllowed()) { new AsyncGraphAction(request, listener).start(); } else { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java index 38d88341ce3de..9c712efe693ca 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction; @@ -47,7 +48,7 @@ public TransportDeleteCalendarAction(Settings settings, TransportService transpo } @Override - protected void doExecute(DeleteCalendarAction.Request request, ActionListener listener) { + protected void doExecute(Task task, DeleteCalendarAction.Request request, ActionListener listener) { final String calendarId = request.getCalendarId(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 7b2311eba2d2d..52896751de1d3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarEventAction; @@ -50,7 +51,8 @@ public TransportDeleteCalendarEventAction(Settings settings, TransportService tr } @Override - protected void doExecute(DeleteCalendarEventAction.Request request, ActionListener listener) { + protected void doExecute(Task task, DeleteCalendarEventAction.Request request, + ActionListener listener) { final String eventId = request.getEventId(); ActionListener calendarListener = ActionListener.wrap( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java index b9ff2cb98b5d6..f7dfb8adb9e2b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; @@ -46,7 +47,8 @@ public TransportDeleteExpiredDataAction(Settings settings, ThreadPool threadPool } @Override - protected void doExecute(DeleteExpiredDataAction.Request request, ActionListener listener) { + protected void doExecute(Task task, DeleteExpiredDataAction.Request request, + ActionListener listener) { logger.info("Deleting expired data"); threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> deleteExpiredData(listener)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java index 79693e2279486..c7d3d64c58cea 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; @@ -52,7 +53,7 @@ public TransportDeleteFilterAction(Settings settings, TransportService transport } @Override - protected void doExecute(DeleteFilterAction.Request request, ActionListener listener) { + protected void doExecute(Task task, DeleteFilterAction.Request request, ActionListener listener) { final String filterId = request.getFilterId(); ClusterState state = clusterService.state(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java index ad22f84f6d468..c63f8a4405b89 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -47,7 +48,8 @@ public TransportDeleteModelSnapshotAction(Settings settings, TransportService tr } @Override - protected void doExecute(DeleteModelSnapshotAction.Request request, ActionListener listener) { + protected void doExecute(Task task, DeleteModelSnapshotAction.Request request, + ActionListener listener) { // Verify the snapshot exists jobProvider.modelSnapshots( request.getJobId(), 0, 1, null, null, null, true, request.getSnapshotId(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java index 7b8128982ee84..63a1efe471a47 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java @@ 
-11,6 +11,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; import org.elasticsearch.xpack.ml.job.JobManager; @@ -36,7 +37,7 @@ public TransportGetBucketsAction(Settings settings, TransportService transportSe } @Override - protected void doExecute(GetBucketsAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetBucketsAction.Request request, ActionListener listener) { jobManager.getJobOrThrowIfUnknown(request.getJobId()); BucketsQueryBuilder query = diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java index 5647c72d44bd2..2e30ad80d859a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.GetCalendarEventsAction; @@ -43,7 +44,7 @@ public TransportGetCalendarEventsAction(Settings settings, TransportService tran } @Override - protected void doExecute(GetCalendarEventsAction.Request request, + protected void doExecute(Task task, GetCalendarEventsAction.Request request, ActionListener listener) { ActionListener calendarExistsListener = ActionListener.wrap( r -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java index 46c252004a3c5..ed837139ade1c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetCalendarsAction; import org.elasticsearch.xpack.core.ml.action.util.PageParams; @@ -33,7 +34,7 @@ public TransportGetCalendarsAction(Settings settings, TransportService transport } @Override - protected void doExecute(GetCalendarsAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetCalendarsAction.Request request, ActionListener listener) { final String calendarId = request.getCalendarId(); if (request.getCalendarId() != null && GetCalendarsAction.Request.ALL.equals(request.getCalendarId()) == false) { getCalendar(calendarId, listener); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java index 
b3a2d9bab0c35..0e0481f394ccf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetCategoriesAction; import org.elasticsearch.xpack.ml.job.JobManager; @@ -35,7 +36,7 @@ public TransportGetCategoriesAction(Settings settings, TransportService transpor } @Override - protected void doExecute(GetCategoriesAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetCategoriesAction.Request request, ActionListener listener) { jobManager.getJobOrThrowIfUnknown(request.getJobId()); Integer from = request.getPageParams() != null ? request.getPageParams().getFrom() : null; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index c8cd7a0d63bb7..83a4c12b819ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -26,6 +26,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; @@ -56,7 +57,7 @@ public TransportGetFiltersAction(Settings settings, TransportService transportSe } @Override - protected void doExecute(GetFiltersAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetFiltersAction.Request request, ActionListener listener) { final String filterId = request.getFilterId(); if (!Strings.isNullOrEmpty(filterId)) { getFilter(filterId, listener); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java index 9d45559bc111c..125e31fcf63cf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetInfluencersAction; import org.elasticsearch.xpack.ml.job.JobManager; @@ -36,7 +37,7 @@ public TransportGetInfluencersAction(Settings settings, TransportService transpo } @Override - protected void doExecute(GetInfluencersAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetInfluencersAction.Request request, ActionListener listener) { jobManager.getJobOrThrowIfUnknown(request.getJobId()); InfluencersQueryBuilder.InfluencersQuery query = 
new InfluencersQueryBuilder() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java index d37cdd90e3aeb..b69db8d48d60f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; @@ -35,7 +36,8 @@ public TransportGetModelSnapshotsAction(Settings settings, TransportService tran } @Override - protected void doExecute(GetModelSnapshotsAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetModelSnapshotsAction.Request request, + ActionListener listener) { logger.debug("Get model snapshots for job {} snapshot ID {}. from = {}, size = {}" + " start = '{}', end='{}', sort={} descending={}", request.getJobId(), request.getSnapshotId(), request.getPageParams().getFrom(), request.getPageParams().getSize(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index d412129b47a31..c0792a45b29d3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.min.Min; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetOverallBucketsAction; @@ -74,7 +75,8 @@ public TransportGetOverallBucketsAction(Settings settings, ThreadPool threadPool } @Override - protected void doExecute(GetOverallBucketsAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetOverallBucketsAction.Request request, + ActionListener listener) { QueryPage jobsPage = jobManager.expandJobs(request.getJobId(), request.allowNoJobs(), clusterService.state()); if (jobsPage.count() == 0) { listener.onResponse(new GetOverallBucketsAction.Response()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java index 6943cd9a01c5e..b1556ba6e45c7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; import org.elasticsearch.xpack.ml.job.JobManager; @@ -36,7 +37,7 @@ public TransportGetRecordsAction(Settings settings, TransportService transportSe } @Override - protected void doExecute(GetRecordsAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetRecordsAction.Request request, ActionListener listener) { jobManager.getJobOrThrowIfUnknown(request.getJobId()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java index b8b57a865e177..5a54e51f4ba2f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.action.MlInfoAction; @@ -37,7 +38,7 @@ public TransportMlInfoAction(Settings settings, TransportService transportServic } @Override - protected void doExecute(MlInfoAction.Request request, ActionListener listener) { + protected void doExecute(Task task, MlInfoAction.Request request, ActionListener listener) { Map info = new HashMap<>(); info.put("defaults", defaults()); info.put("limits", limits()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java index 49cb7dc45c954..c1279248908a2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; @@ -53,7 +54,7 @@ public TransportPostCalendarEventsAction(Settings settings, TransportService tra } @Override - protected void doExecute(PostCalendarEventsAction.Request request, + protected void doExecute(Task task, PostCalendarEventsAction.Request request, ActionListener listener) { List events = request.getScheduledEvents(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java index 867372600d7a4..cc3a34f20f570 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; @@ -51,7 +52,7 @@ public TransportPreviewDatafeedAction(Settings settings, ThreadPool threadPool, } @Override - protected void doExecute(PreviewDatafeedAction.Request request, ActionListener listener) { + protected void doExecute(Task task, PreviewDatafeedAction.Request request, ActionListener listener) { MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterService.state()); DatafeedConfig datafeed = mlMetadata.getDatafeed(request.getDatafeedId()); if (datafeed == null) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java index 82caa9a35a6c5..7611a27cd5a1d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; @@ -46,7 +47,7 @@ public TransportPutCalendarAction(Settings settings, TransportService transportS } @Override - protected void doExecute(PutCalendarAction.Request request, ActionListener listener) { + protected void doExecute(Task task, PutCalendarAction.Request request, ActionListener listener) { Calendar calendar = request.getCalendar(); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, calendar.documentId()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index 011606f3c14ed..19bf35aaed617 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.PutFilterAction; @@ -46,7 +47,7 @@ public TransportPutFilterAction(Settings settings, TransportService transportSer } @Override - protected void doExecute(PutFilterAction.Request request, ActionListener listener) { + protected void doExecute(Task task, PutFilterAction.Request request, ActionListener listener) { MlFilter filter = request.getFilter(); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); indexRequest.opType(DocWriteRequest.OpType.CREATE); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java index fd19c7483bc05..c7c9488c26825 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; @@ -33,7 +34,7 @@ public TransportUpdateCalendarJobAction(Settings settings, TransportService tran } @Override - protected void doExecute(UpdateCalendarJobAction.Request request, ActionListener listener) { + protected void doExecute(Task task, UpdateCalendarJobAction.Request request, ActionListener listener) { Set jobIdsToAdd = Strings.tokenizeByCommaToSet(request.getJobIdsToAddExpression()); Set jobIdsToRemove = Strings.tokenizeByCommaToSet(request.getJobIdsToRemoveExpression()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java index 37f550fbb02ea..c8dbf9273829f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.PutFilterAction; @@ -62,7 +63,7 @@ public TransportUpdateFilterAction(Settings settings, TransportService transport } @Override - protected void doExecute(UpdateFilterAction.Request request, ActionListener listener) { + protected void doExecute(Task task, UpdateFilterAction.Request request, ActionListener listener) { ActionListener filterListener = ActionListener.wrap(filterWithVersion -> { updateFilter(filterWithVersion, request, listener); }, listener::onFailure); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java index fe5498006d902..8000eaacd4fbe 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.messages.Messages; @@ -50,7 +51,8 @@ public TransportUpdateModelSnapshotAction(Settings settings, TransportService 
tr } @Override - protected void doExecute(UpdateModelSnapshotAction.Request request, ActionListener listener) { + protected void doExecute(Task task, UpdateModelSnapshotAction.Request request, + ActionListener listener) { logger.debug("Received request to update model snapshot [{}] for job [{}]", request.getSnapshotId(), request.getJobId()); jobProvider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), modelSnapshot -> { if (modelSnapshot == null) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java index 284ae505a01e4..c2e89dc78c28e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; @@ -25,7 +26,7 @@ public TransportValidateDetectorAction(Settings settings, TransportService trans } @Override - protected void doExecute(ValidateDetectorAction.Request request, ActionListener listener) { + protected void doExecute(Task task, ValidateDetectorAction.Request request, ActionListener listener) { listener.onResponse(new ValidateDetectorAction.Response(true)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java index dc2a8155c4d94..b644bc1d47067 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; @@ -25,7 +26,8 @@ public TransportValidateJobConfigAction(Settings settings, TransportService tran } @Override - protected void doExecute(ValidateJobConfigAction.Request request, ActionListener listener) { + protected void doExecute(Task task, ValidateJobConfigAction.Request request, + ActionListener listener) { listener.onResponse(new ValidateJobConfigAction.Response(true)); } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java index 3ec30552a00b5..c48c33797c41d 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; @@ -51,7 +52,7 @@ public TransportMonitoringBulkAction(Settings settings, ThreadPool threadPool, C } @Override - protected void doExecute(MonitoringBulkRequest request, ActionListener listener) { + protected void doExecute(Task task, MonitoringBulkRequest request, ActionListener listener) { clusterService.state().blocks().globalBlockedRaiseException(ClusterBlockLevel.WRITE); // ignore incoming bulk requests when collection is disabled in ES diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java index 216afa49c1cc0..5f013e8897bde 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.GetRollupCapsAction; @@ -42,7 +43,7 @@ public TransportGetRollupCapsAction(Settings settings, TransportService transpor } @Override - protected void doExecute(GetRollupCapsAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetRollupCapsAction.Request request, ActionListener listener) { Map allCaps = getCaps(request.getIndexPattern(), clusterService.state().getMetaData().indices()); listener.onResponse(new GetRollupCapsAction.Response(allCaps)); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 239be32033f13..7be9cc7ae3b20 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -99,7 +99,7 @@ public TransportRollupSearchAction(Settings settings, TransportService transport } @Override - protected void doExecute(SearchRequest request, ActionListener listener) { + protected void doExecute(Task task, SearchRequest request, ActionListener listener) { RollupSearchContext rollupSearchContext = separateIndices(request.indices(), clusterService.state().getMetaData().indices()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java index 62da4def6726a..2a88f5be00779 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.support.HandledTransportAction; 
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; @@ -31,7 +32,7 @@ public TransportDeleteRoleAction(Settings settings, ActionFilters actionFilters, } @Override - protected void doExecute(DeleteRoleRequest request, ActionListener listener) { + protected void doExecute(Task task, DeleteRoleRequest request, ActionListener listener) { if (ReservedRolesStore.isReserved(request.name())) { listener.onFailure(new IllegalArgumentException("role [" + request.name() + "] is reserved and cannot be deleted")); return; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java index 87c4ac7d9affa..b930e43e55c8b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.GetRolesAction; import org.elasticsearch.xpack.core.security.action.role.GetRolesRequest; @@ -37,7 +38,7 @@ public TransportGetRolesAction(Settings settings, ActionFilters actionFilters, } @Override - protected void doExecute(final GetRolesRequest request, final ActionListener listener) { + protected void doExecute(Task task, final GetRolesRequest request, final ActionListener listener) { final String[] requestedRoles = request.names(); final boolean specificRolesRequested = requestedRoles != null && requestedRoles.length > 0; final List rolesToSearchFor = new ArrayList<>(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java index 7257dc947f5d8..5edd8764c09a4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; @@ -29,7 +30,7 @@ public TransportPutRoleAction(Settings settings, ActionFilters actionFilters, } @Override - protected void doExecute(final PutRoleRequest request, final ActionListener listener) { + protected void doExecute(Task task, final PutRoleRequest request, final ActionListener listener) { final String name = request.roleDescriptor().getName(); if (ReservedRolesStore.isReserved(name)) { 
listener.onFailure(new IllegalArgumentException("role [" + name + "] is reserved and cannot be modified.")); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index d5d30d80a3115..2f8e97661948f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; @@ -30,8 +31,7 @@ public TransportDeleteRoleMappingAction(Settings settings, ActionFilters actionF } @Override - protected void doExecute(DeleteRoleMappingRequest request, - ActionListener listener) { + protected void doExecute(Task task, DeleteRoleMappingRequest request, ActionListener listener) { roleMappingStore.deleteRoleMapping(request, new ActionListener() { @Override public void onResponse(Boolean found) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java index 313d4d02e69b2..86a4f57a6e74d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; @@ -35,8 +36,7 @@ public TransportGetRoleMappingsAction(Settings settings, ActionFilters actionFil } @Override - protected void doExecute(final GetRoleMappingsRequest request, - final ActionListener listener) { + protected void doExecute(Task task, final GetRoleMappingsRequest request, final ActionListener listener) { final Set names; if (request.getNames() == null || request.getNames().length == 0) { names = null; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index 8e72a7d76e6ef..057e22d49a50c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ 
-10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; @@ -30,8 +31,7 @@ public TransportPutRoleMappingAction(Settings settings, ActionFilters actionFilt } @Override - protected void doExecute(final PutRoleMappingRequest request, - final ActionListener listener) { + protected void doExecute(Task task, final PutRoleMappingRequest request, final ActionListener listener) { roleMappingStore.putRoleMapping(request, ActionListener.wrap( created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java index 3d0965b96aa9c..d2507d51d0e88 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction; @@ -46,8 +47,7 @@ public TransportSamlAuthenticateAction(Settings settings, ThreadPool threadPool, } @Override - protected void doExecute(SamlAuthenticateRequest request, - ActionListener listener) { + protected void doExecute(Task task, SamlAuthenticateRequest request, ActionListener listener) { final SamlToken saml = new SamlToken(request.getSaml(), request.getValidRequestIds()); logger.trace("Attempting to authenticate SamlToken [{}]", saml); final ThreadContext threadContext = threadPool.getThreadContext(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java index 778364bf5c1a5..00caaf6dacff3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionAction; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionRequest; @@ -54,8 +55,7 @@ public TransportSamlInvalidateSessionAction(Settings settings, TransportService } @Override - protected void doExecute(SamlInvalidateSessionRequest request, - 
ActionListener listener) { + protected void doExecute(Task task, SamlInvalidateSessionRequest request, ActionListener listener) { List realms = findSamlRealms(this.realms, request.getRealmName(), request.getAssertionConsumerServiceURL()); if (realms.isEmpty()) { listener.onFailure(SamlUtils.samlException("Cannot find any matching realm for [{}]", request)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java index 43873c5bcadf4..63931d119e0f2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutAction; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutRequest; @@ -48,8 +49,7 @@ public TransportSamlLogoutAction(Settings settings, TransportService transportSe } @Override - protected void doExecute(SamlLogoutRequest request, - ActionListener listener) { + protected void doExecute(Task task, SamlLogoutRequest request, ActionListener listener) { invalidateRefreshToken(request.getRefreshToken(), ActionListener.wrap(ignore -> { try { final String token = request.getToken(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java index 58eb5ccc59ce1..48330bf63cd6c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationAction; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationRequest; @@ -42,9 +43,9 @@ public TransportSamlPrepareAuthenticationAction(Settings settings, TransportServ } @Override - protected void doExecute(SamlPrepareAuthenticationRequest request, + protected void doExecute(Task task, SamlPrepareAuthenticationRequest request, ActionListener listener) { - List realms = findSamlRealms(this.realms, request.getRealmName(), request.getAssertionConsumerServiceURL() ); + List realms = findSamlRealms(this.realms, request.getRealmName(), request.getAssertionConsumerServiceURL()); if (realms.isEmpty()) { listener.onFailure(SamlUtils.samlException("Cannot find any matching realm for [{}]", request)); } else if (realms.size() > 1) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java index 60d3086763a09..358f6aee712df 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.CreateTokenAction; @@ -46,7 +47,7 @@ public TransportCreateTokenAction(Settings settings, ThreadPool threadPool, Tran } @Override - protected void doExecute(CreateTokenRequest request, ActionListener listener) { + protected void doExecute(Task task, CreateTokenRequest request, ActionListener listener) { Authentication originatingAuthentication = Authentication.getAuthentication(threadPool.getThreadContext()); try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().stashContext()) { final UsernamePasswordToken authToken = new UsernamePasswordToken(request.getUsername(), request.getPassword()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java index 7b280087d617b..ecc97399df862 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest; @@ -32,7 +33,7 @@ public TransportInvalidateTokenAction(Settings settings, TransportService transp } @Override - protected void doExecute(InvalidateTokenRequest request, ActionListener listener) { + protected void doExecute(Task task, InvalidateTokenRequest request, ActionListener listener) { final ActionListener invalidateListener = ActionListener.wrap(created -> listener.onResponse(new InvalidateTokenResponse(created)), listener::onFailure); if (request.getTokenType() == InvalidateTokenRequest.Type.ACCESS_TOKEN) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java index 601ee944dd82b..c84fd58830725 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest; import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse; @@ -30,7 +31,7 @@ public TransportRefreshTokenAction(Settings settings, TransportService transport } @Override - protected void doExecute(CreateTokenRequest request, ActionListener listener) { + protected void doExecute(Task task, CreateTokenRequest request, ActionListener listener) { tokenService.refreshToken(request.getRefreshToken(), ActionListener.wrap(tuple -> { final String tokenStr = tokenService.getUserTokenString(tuple.v1()); final String scope = getResponseScopeValue(request.getScope()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java index 6a7321bd766de..57510ce116f7d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; @@ -35,7 +36,7 @@ public TransportAuthenticateAction(Settings settings, TransportService transport } @Override - protected void doExecute(AuthenticateRequest request, ActionListener listener) { + protected void doExecute(Task task, AuthenticateRequest request, ActionListener listener) { final User runAsUser = securityContext.getUser(); final User authUser = runAsUser == null ? 
null : runAsUser.authenticatedUser(); if (authUser == null) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 78b4ae0193655..7a42cd5fdea97 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; @@ -31,7 +32,7 @@ public TransportChangePasswordAction(Settings settings, TransportService transpo } @Override - protected void doExecute(ChangePasswordRequest request, ActionListener listener) { + protected void doExecute(Task task, ChangePasswordRequest request, ActionListener listener) { final String username = request.username(); if (AnonymousUser.isAnonymousUsername(username, settings)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be modified via the API")); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java index dc78f64163e5f..36efdf3bd1737 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; @@ -35,7 +36,7 @@ public TransportDeleteUserAction(Settings settings, ActionFilters actionFilters, } @Override - protected void doExecute(DeleteUserRequest request, final ActionListener listener) { + protected void doExecute(Task task, DeleteUserRequest request, final ActionListener listener) { final String username = request.username(); if (ClientReservedRealm.isReserved(username, settings)) { if (AnonymousUser.isAnonymousUsername(username, settings)) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index 49e8c9d96aba9..7e17cda75f0ab 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.GetUsersAction; import org.elasticsearch.xpack.core.security.action.user.GetUsersRequest; @@ -43,7 +44,7 @@ public TransportGetUsersAction(Settings settings, ActionFilters actionFilters, } @Override - protected void doExecute(final GetUsersRequest request, final ActionListener listener) { + protected void doExecute(Task task, final GetUsersRequest request, final ActionListener listener) { final String[] requestedUsers = request.usernames(); final boolean specificUsersRequested = requestedUsers != null && requestedUsers.length > 0; final List usersToSearchFor = new ArrayList<>(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java index 1f6d0cd16decf..9571b022e0a67 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; @@ -55,7 +56,7 @@ public TransportHasPrivilegesAction(Settings settings, ThreadPool threadPool, Tr } @Override - protected void doExecute(HasPrivilegesRequest request, ActionListener listener) { + protected void doExecute(Task task, HasPrivilegesRequest request, ActionListener listener) { final String username = request.username(); final User user = Authentication.getAuthentication(threadPool.getThreadContext()).getUser(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java index 85411b0e75f89..ebc1612afca1b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; @@ -34,7 +35,7 @@ public TransportPutUserAction(Settings settings, ActionFilters actionFilters, } @Override - protected void doExecute(final PutUserRequest request, final ActionListener listener) { + protected void doExecute(Task task, final PutUserRequest request, final ActionListener listener) { final String username = request.username(); if (ClientReservedRealm.isReserved(username, settings)) { if (AnonymousUser.isAnonymousUsername(username, settings)) { diff 
--git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java index f4e99d364ec0b..cbf505d9c6751 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.SetEnabledAction; @@ -38,7 +39,7 @@ public TransportSetEnabledAction(Settings settings, ThreadPool threadPool, Trans } @Override - protected void doExecute(SetEnabledRequest request, ActionListener listener) { + protected void doExecute(Task task, SetEnabledRequest request, ActionListener listener) { final String username = request.username(); // make sure the user is not disabling themselves if (Authentication.getAuthentication(threadPool.getThreadContext()).getUser().principal().equals(request.username())) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java index fba4afe47911e..572e948b26e80 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; @@ -51,7 +52,7 @@ public void testReservedRole() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteRoleResponse deleteRoleResponse) { responseRef.set(deleteRoleResponse); @@ -94,7 +95,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteRoleResponse deleteRoleResponse) { responseRef.set(deleteRoleResponse); @@ -138,7 +139,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteRoleResponse 
deleteRoleResponse) { responseRef.set(deleteRoleResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java index 27ae467c786db..672a24eb45d39 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.GetRolesRequest; @@ -64,7 +65,7 @@ public void testReservedRoles() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetRolesResponse response) { responseRef.set(response); @@ -105,7 +106,7 @@ public void testStoreRoles() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetRolesResponse response) { responseRef.set(response); @@ -173,7 +174,7 @@ public void testGetAllOrMix() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetRolesResponse response) { responseRef.set(response); @@ -221,7 +222,7 @@ public void testException() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetRolesResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java index 8392f92e0c31f..eb606314788c9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; @@ -51,7 +52,7 @@ public void testReservedRole() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new 
AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutRoleResponse response) { responseRef.set(response); @@ -93,7 +94,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutRoleResponse response) { responseRef.set(response); @@ -135,7 +136,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutRoleResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java index ea6713bb85fe9..d10020cd78b3f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; @@ -67,7 +68,7 @@ public void testGetSingleRole() throws Exception { final ExpressionRoleMapping mapping = mock(ExpressionRoleMapping.class); result = Collections.singletonList(mapping); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); assertThat(future.get(), notNullValue()); assertThat(future.get().mappings(), arrayContaining(mapping)); assertThat(namesRef.get(), containsInAnyOrder("everyone")); @@ -83,7 +84,7 @@ public void testGetMultipleNamedRoles() throws Exception { final ExpressionRoleMapping mapping3 = mock(ExpressionRoleMapping.class); result = Arrays.asList(mapping1, mapping2, mapping3); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final GetRoleMappingsResponse response = future.get(); assertThat(response, notNullValue()); @@ -101,7 +102,7 @@ public void testGetAllRoles() throws Exception { final ExpressionRoleMapping mapping3 = mock(ExpressionRoleMapping.class); result = Arrays.asList(mapping1, mapping2, mapping3); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final GetRoleMappingsResponse response = future.get(); assertThat(response, notNullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 
b105c0d5d0ea5..68a957c9c3c14 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; @@ -84,7 +85,7 @@ private PutRoleMappingResponse put(String name, FieldExpression expression, Stri request.setMetadata(metadata); request.setEnabled(true); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); return future.get(); } } \ No newline at end of file diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index b9232903f52ff..bec6038b65580 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; @@ -240,7 +241,7 @@ public void testInvalidateCorrectTokensFromLogoutRequest() throws Exception { request.setRealmName(samlRealm.name()); request.setQueryString("SAMLRequest=foo"); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final SamlInvalidateSessionResponse response = future.get(); assertThat(response, notNullValue()); assertThat(response.getCount(), equalTo(2)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 6d177d89021ab..1185fa29986b0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -228,7 +229,7 @@ public void testLogoutInvalidatesToken() throws Exception { final SamlLogoutRequest request = new SamlLogoutRequest(); request.setToken(tokenString); final 
PlainActionFuture listener = new PlainActionFuture<>(); - action.doExecute(request, listener); + action.doExecute(mock(Task.class), request, listener); final SamlLogoutResponse response = listener.get(); assertThat(response, notNullValue()); assertThat(response.getRedirectUrl(), notNullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java index 66e2192eee5dd..96b8b4fe25764 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -43,7 +44,7 @@ public void testInternalUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new AuthenticateRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new AuthenticateRequest(), new ActionListener() { @Override public void onResponse(AuthenticateResponse authenticateResponse) { responseRef.set(authenticateResponse); @@ -69,7 +70,7 @@ public void testNullUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new AuthenticateRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new AuthenticateRequest(), new ActionListener() { @Override public void onResponse(AuthenticateResponse authenticateResponse) { responseRef.set(authenticateResponse); @@ -97,7 +98,7 @@ public void testValidUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new AuthenticateRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new AuthenticateRequest(), new ActionListener() { @Override public void onResponse(AuthenticateResponse authenticateResponse) { responseRef.set(authenticateResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java index 4aa68c24c8d71..8808ab92a41f9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.transport.TransportService; @@ -59,7 +60,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = 
new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); @@ -90,7 +91,7 @@ public void testInternalUsers() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); @@ -128,7 +129,7 @@ public void testValidUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); @@ -169,7 +170,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java index 9c61d0cde520c..ed7f9cff6e25e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; @@ -53,7 +54,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -82,7 +83,7 @@ public void testInternalUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -112,7 +113,7 @@ public void testReservedUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), 
request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -152,7 +153,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -192,7 +193,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index 070fe4e64317b..b11a57c2d678a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -99,7 +100,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -134,7 +135,7 @@ public void testInternalUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -178,7 +179,7 @@ public void testReservedUsersOnly() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -225,7 +226,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -271,7 +272,7 @@ public void testGetStoreOnlyUsers() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - 
action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -319,7 +320,7 @@ public void testException() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java index cb0f643fd89d7..9f4d7c957b46c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.mock.orig.Mockito; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -92,7 +93,7 @@ public void testNamedIndexPrivilegesMatchApplicableActions() throws Exception { .privileges(DeleteAction.NAME, IndexAction.NAME) .build()); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); @@ -128,7 +129,7 @@ public void testMatchSubsetOfPrivileges() throws Exception { .privileges("delete", "index", "manage") .build()); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); @@ -230,7 +231,7 @@ public void testWildcardHandling() throws Exception { .build() ); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); @@ -298,7 +299,7 @@ private HasPrivilegesResponse hasPrivileges(RoleDescriptor.IndicesPrivileges ind request.clusterPrivileges(clusterPrivileges); request.indexPrivileges(indicesPrivileges); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); return response; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java index 5f9a3f5243160..86a70bdf7e08e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.threadpool.ThreadPool; @@ -65,7 +66,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -94,7 +95,7 @@ public void testSystemUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -135,7 +136,7 @@ public void testReservedUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -179,7 +180,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -220,7 +221,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java index a8076c21cdb49..1c2eb8a9a1503 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -72,7 +73,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - 
action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -110,7 +111,7 @@ public void testInternalUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -160,7 +161,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -212,7 +213,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -252,7 +253,7 @@ public void testUserModifyingThemselves() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index b6a53ae95f098..7a216f3a2bf9f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.session.Configuration; @@ -33,7 +34,7 @@ public TransportSqlClearCursorAction(Settings settings, TransportService transpo } @Override - protected void doExecute(SqlClearCursorRequest request, ActionListener listener) { + protected void doExecute(Task task, SqlClearCursorRequest request, ActionListener listener) { sqlLicenseChecker.checkIfSqlAllowed(request.mode()); operation(planExecutor, request, listener); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index bc96b3de97307..7993f00d71aee 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ 
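// The hunks above repeat one mechanical change: every security action test now invokes the
// task-aware doExecute(Task, Request, ActionListener) overload, passing a mocked Task via
// mock(Task.class) together with a listener that stashes the outcome in AtomicReferences for
// later assertions. A minimal, self-contained sketch of that capture pattern follows; the
// Listener interface is a stand-in for org.elasticsearch.action.ActionListener and the names
// are illustrative, not taken verbatim from this patch.

import java.util.concurrent.atomic.AtomicReference;

class CapturingListenerSketch {

    interface Listener<R> {
        void onResponse(R response);
        void onFailure(Exception e);
    }

    // Builds a listener that records either the response or the failure, mirroring the
    // anonymous ActionListener instances wired into the doExecute calls above.
    static <R> Listener<R> capturing(AtomicReference<R> responseRef, AtomicReference<Exception> failureRef) {
        return new Listener<R>() {
            @Override
            public void onResponse(R response) {
                responseRef.set(response); // success path: keep the response for assertions
            }

            @Override
            public void onFailure(Exception e) {
                failureRef.set(e); // failure path: keep the exception for assertions
            }
        };
    }
}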
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.proto.ColumnInfo; @@ -43,7 +44,7 @@ public TransportSqlQueryAction(Settings settings, TransportService transportServ } @Override - protected void doExecute(SqlQueryRequest request, ActionListener listener) { + protected void doExecute(Task task, SqlQueryRequest request, ActionListener listener) { sqlLicenseChecker.checkIfSqlAllowed(request.mode()); operation(planExecutor, request, listener); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java index 61772ce6bb1e9..4ef7c14ab01f3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.session.Configuration; @@ -33,7 +34,7 @@ public TransportSqlTranslateAction(Settings settings, TransportService transport } @Override - protected void doExecute(SqlTranslateRequest request, ActionListener listener) { + protected void doExecute(Task task, SqlTranslateRequest request, ActionListener listener) { sqlLicenseChecker.checkIfSqlAllowed(request.mode()); Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(), diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java index 3d493299d7036..56ce9f6d4a280 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java @@ -35,11 +35,13 @@ protected String executor() { } @Override - protected void doExecute(Task task, final Request request, ActionListener listener) { + protected final void doExecute(Task task, final Request request, ActionListener listener) { if (licenseState.isWatcherAllowed()) { - super.doExecute(task, request, listener); + doExecute(request, listener); } else { listener.onFailure(LicenseUtils.newComplianceException(XPackField.WATCHER)); } } + + protected abstract void doExecute(Request request, ActionListener listener); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java index 3f430e2ec2479..d7ff25b623f50 100644 --- 
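// The WatcherTransportAction hunk above carries the one conceptual change in this group: the
// task-aware doExecute(Task, Request, ActionListener) override becomes final and keeps the
// license gate, and subclasses now implement a new task-free doExecute instead of overriding
// the gated method. A minimal, self-contained sketch of that delegation pattern follows; the
// types below are simplified stand-ins (not the actual Elasticsearch or X-Pack classes) and
// the Object task parameter stands in for org.elasticsearch.tasks.Task.

abstract class GatedTransportActionSketch<Request, Response> {

    interface Listener<R> {
        void onResponse(R response);
        void onFailure(Exception e);
    }

    private final boolean watcherAllowed; // stand-in for XPackLicenseState#isWatcherAllowed()

    GatedTransportActionSketch(boolean watcherAllowed) {
        this.watcherAllowed = watcherAllowed;
    }

    // Final task-aware entry point: every subclass inherits the license check exactly once.
    protected final void doExecute(Object task, Request request, Listener<Response> listener) {
        if (watcherAllowed) {
            doExecute(request, listener); // hand off to the task-free variant below
        } else {
            // stand-in for LicenseUtils.newComplianceException(XPackField.WATCHER)
            listener.onFailure(new IllegalStateException("current license is non-compliant for [watcher]"));
        }
    }

    // Concrete actions implement only this method and never see the task.
    protected abstract void doExecute(Request request, Listener<Response> listener);
}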
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchAction; import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; @@ -42,7 +43,7 @@ public TransportDeleteWatchAction(Settings settings, TransportService transportS } @Override - protected void doExecute(DeleteWatchRequest request, ActionListener listener) { + protected void doExecute(Task task, DeleteWatchRequest request, ActionListener listener) { DeleteRequest deleteRequest = new DeleteRequest(Watch.INDEX, Watch.DOC_TYPE, request.getId()); deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, deleteRequest, From eb81a305ae36beed75dff7e61b05ec10c69f339b Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 22 Jun 2018 15:39:34 -0700 Subject: [PATCH 82/92] [DOCS] Move monitoring to docs folder (#31477) --- docs/reference/index.asciidoc | 2 +- .../docs/en => docs/reference}/monitoring/collectors.asciidoc | 1 + .../docs/en => docs/reference}/monitoring/exporters.asciidoc | 1 + .../docs/en => docs/reference}/monitoring/http-export.asciidoc | 1 + {x-pack/docs/en => docs/reference}/monitoring/index.asciidoc | 1 + .../docs/en => docs/reference}/monitoring/local-export.asciidoc | 1 + .../docs/en => docs/reference}/monitoring/pause-export.asciidoc | 1 + x-pack/docs/en/monitoring/configuring-monitoring.asciidoc | 1 + x-pack/docs/en/monitoring/indices.asciidoc | 1 + 9 files changed, 9 insertions(+), 1 deletion(-) rename {x-pack/docs/en => docs/reference}/monitoring/collectors.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/monitoring/exporters.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/monitoring/http-export.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/monitoring/index.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/monitoring/local-export.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/monitoring/pause-export.asciidoc (98%) diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index adbdc01db1e72..ef8e345303137 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -59,7 +59,7 @@ include::ingest.asciidoc[] include::{xes-repo-dir}/sql/index.asciidoc[] -include::{xes-repo-dir}/monitoring/index.asciidoc[] +include::monitoring/index.asciidoc[] include::{xes-repo-dir}/rollup/index.asciidoc[] diff --git a/x-pack/docs/en/monitoring/collectors.asciidoc b/docs/reference/monitoring/collectors.asciidoc similarity index 99% rename from x-pack/docs/en/monitoring/collectors.asciidoc rename to docs/reference/monitoring/collectors.asciidoc index 336f204b5eefb..bd48d1287006a 100644 --- a/x-pack/docs/en/monitoring/collectors.asciidoc +++ b/docs/reference/monitoring/collectors.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[es-monitoring-collectors]] == Collectors diff --git a/x-pack/docs/en/monitoring/exporters.asciidoc b/docs/reference/monitoring/exporters.asciidoc similarity index 99% rename from 
x-pack/docs/en/monitoring/exporters.asciidoc rename to docs/reference/monitoring/exporters.asciidoc index e7727f1e97af0..2a7729eee9425 100644 --- a/x-pack/docs/en/monitoring/exporters.asciidoc +++ b/docs/reference/monitoring/exporters.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[es-monitoring-exporters]] == Exporters diff --git a/x-pack/docs/en/monitoring/http-export.asciidoc b/docs/reference/monitoring/http-export.asciidoc similarity index 99% rename from x-pack/docs/en/monitoring/http-export.asciidoc rename to docs/reference/monitoring/http-export.asciidoc index db1dbe2a29c5b..4dfe1a0c537ea 100644 --- a/x-pack/docs/en/monitoring/http-export.asciidoc +++ b/docs/reference/monitoring/http-export.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[http-exporter]] === HTTP Exporters diff --git a/x-pack/docs/en/monitoring/index.asciidoc b/docs/reference/monitoring/index.asciidoc similarity index 99% rename from x-pack/docs/en/monitoring/index.asciidoc rename to docs/reference/monitoring/index.asciidoc index 6b8ecc5038ea0..d6a55f44585dd 100644 --- a/x-pack/docs/en/monitoring/index.asciidoc +++ b/docs/reference/monitoring/index.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[es-monitoring]] = Monitoring {es} diff --git a/x-pack/docs/en/monitoring/local-export.asciidoc b/docs/reference/monitoring/local-export.asciidoc similarity index 99% rename from x-pack/docs/en/monitoring/local-export.asciidoc rename to docs/reference/monitoring/local-export.asciidoc index 12d0ab5ea9f81..2bc757f07ecc8 100644 --- a/x-pack/docs/en/monitoring/local-export.asciidoc +++ b/docs/reference/monitoring/local-export.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[local-exporter]] === Local Exporters diff --git a/x-pack/docs/en/monitoring/pause-export.asciidoc b/docs/reference/monitoring/pause-export.asciidoc similarity index 98% rename from x-pack/docs/en/monitoring/pause-export.asciidoc rename to docs/reference/monitoring/pause-export.asciidoc index d26799c6892c3..128e72a463c2d 100644 --- a/x-pack/docs/en/monitoring/pause-export.asciidoc +++ b/docs/reference/monitoring/pause-export.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[pause-export]] == Pausing Data Collection diff --git a/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc b/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc index 1712c88380b7a..e705100e05e67 100644 --- a/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc +++ b/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="gold"] [[configuring-monitoring]] == Configuring Monitoring in {es} ++++ diff --git a/x-pack/docs/en/monitoring/indices.asciidoc b/x-pack/docs/en/monitoring/indices.asciidoc index efa9836daa2e9..a27d91d423e8d 100644 --- a/x-pack/docs/en/monitoring/indices.asciidoc +++ b/x-pack/docs/en/monitoring/indices.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[config-monitoring-indices]] === Configuring Indices for Monitoring From 638b9fd88cdbe717d2e13bf4dbbda9ce610bc198 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 22 Jun 2018 15:40:25 -0700 Subject: [PATCH 83/92] [DOCS] Move sql to docs (#31474) --- docs/build.gradle | 82 +++++++++++++++++++ docs/reference/index.asciidoc | 2 +- .../reference}/sql/appendix/index.asciidoc | 0 .../sql/appendix/syntax-reserved.asciidoc | 2 + .../reference}/sql/concepts.asciidoc | 2 + .../reference}/sql/endpoints/cli.asciidoc | 2 + .../reference}/sql/endpoints/index.asciidoc | 0 
.../reference}/sql/endpoints/jdbc.asciidoc | 2 + .../reference}/sql/endpoints/rest.asciidoc | 2 + .../sql/endpoints/translate.asciidoc | 2 + .../reference}/sql/functions/index.asciidoc | 2 + .../reference}/sql/getting-started.asciidoc | 2 + .../en => docs/reference}/sql/index.asciidoc | 1 + .../sql/language/data-types.asciidoc | 2 + .../reference}/sql/language/index.asciidoc | 2 + .../language/syntax/describe-table.asciidoc | 2 + .../sql/language/syntax/index.asciidoc | 2 + .../sql/language/syntax/select.asciidoc | 2 + .../sql/language/syntax/show-columns.asciidoc | 2 + .../language/syntax/show-functions.asciidoc | 2 + .../sql/language/syntax/show-tables.asciidoc | 2 + .../reference}/sql/overview.asciidoc | 2 + .../reference}/sql/security.asciidoc | 2 + 23 files changed, 120 insertions(+), 1 deletion(-) rename {x-pack/docs/en => docs/reference}/sql/appendix/index.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/sql/appendix/syntax-reserved.asciidoc (98%) rename {x-pack/docs/en => docs/reference}/sql/concepts.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/sql/endpoints/cli.asciidoc (97%) rename {x-pack/docs/en => docs/reference}/sql/endpoints/index.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/sql/endpoints/jdbc.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/sql/endpoints/rest.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/sql/endpoints/translate.asciidoc (97%) rename {x-pack/docs/en => docs/reference}/sql/functions/index.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/sql/getting-started.asciidoc (98%) rename {x-pack/docs/en => docs/reference}/sql/index.asciidoc (98%) rename {x-pack/docs/en => docs/reference}/sql/language/data-types.asciidoc (98%) rename {x-pack/docs/en => docs/reference}/sql/language/index.asciidoc (87%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/describe-table.asciidoc (87%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/index.asciidoc (94%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/select.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/show-columns.asciidoc (85%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/show-functions.asciidoc (89%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/show-tables.asciidoc (89%) rename {x-pack/docs/en => docs/reference}/sql/overview.asciidoc (97%) rename {x-pack/docs/en => docs/reference}/sql/security.asciidoc (98%) diff --git a/docs/build.gradle b/docs/build.gradle index f1d1324192b16..b04016c946eed 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -522,3 +522,85 @@ for (int i = 0; i < 5; i++) { {"index":{}} {"ip": "12.0.0.$i"}""" } +// Used by SQL because it looks SQL-ish +buildRestTests.setups['library'] = ''' + - do: + indices.create: + index: library + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + mappings: + book: + properties: + name: + type: text + fields: + keyword: + type: keyword + author: + type: text + fields: + keyword: + type: keyword + release_date: + type: date + page_count: + type: short + - do: + bulk: + index: library + type: book + refresh: true + body: | + {"index":{"_id": "Leviathan Wakes"}} + {"name": "Leviathan Wakes", "author": "James S.A. 
Corey", "release_date": "2011-06-02", "page_count": 561} + {"index":{"_id": "Hyperion"}} + {"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} + {"index":{"_id": "Dune"}} + {"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} + {"index":{"_id": "Dune Messiah"}} + {"name": "Dune Messiah", "author": "Frank Herbert", "release_date": "1969-10-15", "page_count": 331} + {"index":{"_id": "Children of Dune"}} + {"name": "Children of Dune", "author": "Frank Herbert", "release_date": "1976-04-21", "page_count": 408} + {"index":{"_id": "God Emperor of Dune"}} + {"name": "God Emperor of Dune", "author": "Frank Herbert", "release_date": "1981-05-28", "page_count": 454} + {"index":{"_id": "Consider Phlebas"}} + {"name": "Consider Phlebas", "author": "Iain M. Banks", "release_date": "1987-04-23", "page_count": 471} + {"index":{"_id": "Pandora's Star"}} + {"name": "Pandora's Star", "author": "Peter F. Hamilton", "release_date": "2004-03-02", "page_count": 768} + {"index":{"_id": "Revelation Space"}} + {"name": "Revelation Space", "author": "Alastair Reynolds", "release_date": "2000-03-15", "page_count": 585} + {"index":{"_id": "A Fire Upon the Deep"}} + {"name": "A Fire Upon the Deep", "author": "Vernor Vinge", "release_date": "1992-06-01", "page_count": 613} + {"index":{"_id": "Ender's Game"}} + {"name": "Ender's Game", "author": "Orson Scott Card", "release_date": "1985-06-01", "page_count": 324} + {"index":{"_id": "1984"}} + {"name": "1984", "author": "George Orwell", "release_date": "1985-06-01", "page_count": 328} + {"index":{"_id": "Fahrenheit 451"}} + {"name": "Fahrenheit 451", "author": "Ray Bradbury", "release_date": "1953-10-15", "page_count": 227} + {"index":{"_id": "Brave New World"}} + {"name": "Brave New World", "author": "Aldous Huxley", "release_date": "1932-06-01", "page_count": 268} + {"index":{"_id": "Foundation"}} + {"name": "Foundation", "author": "Isaac Asimov", "release_date": "1951-06-01", "page_count": 224} + {"index":{"_id": "The Giver"}} + {"name": "The Giver", "author": "Lois Lowry", "release_date": "1993-04-26", "page_count": 208} + {"index":{"_id": "Slaughterhouse-Five"}} + {"name": "Slaughterhouse-Five", "author": "Kurt Vonnegut", "release_date": "1969-06-01", "page_count": 275} + {"index":{"_id": "The Hitchhiker's Guide to the Galaxy"}} + {"name": "The Hitchhiker's Guide to the Galaxy", "author": "Douglas Adams", "release_date": "1979-10-12", "page_count": 180} + {"index":{"_id": "Snow Crash"}} + {"name": "Snow Crash", "author": "Neal Stephenson", "release_date": "1992-06-01", "page_count": 470} + {"index":{"_id": "Neuromancer"}} + {"name": "Neuromancer", "author": "William Gibson", "release_date": "1984-07-01", "page_count": 271} + {"index":{"_id": "The Handmaid's Tale"}} + {"name": "The Handmaid's Tale", "author": "Margaret Atwood", "release_date": "1985-06-01", "page_count": 311} + {"index":{"_id": "Starship Troopers"}} + {"name": "Starship Troopers", "author": "Robert A. Heinlein", "release_date": "1959-12-01", "page_count": 335} + {"index":{"_id": "The Left Hand of Darkness"}} + {"name": "The Left Hand of Darkness", "author": "Ursula K. Le Guin", "release_date": "1969-06-01", "page_count": 304} + {"index":{"_id": "The Moon is a Harsh Mistress"}} + {"name": "The Moon is a Harsh Mistress", "author": "Robert A. 
Heinlein", "release_date": "1966-04-01", "page_count": 288} + +''' \ No newline at end of file diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index ef8e345303137..7d51e4aa51264 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -57,7 +57,7 @@ include::index-modules.asciidoc[] include::ingest.asciidoc[] -include::{xes-repo-dir}/sql/index.asciidoc[] +include::sql/index.asciidoc[] include::monitoring/index.asciidoc[] diff --git a/x-pack/docs/en/sql/appendix/index.asciidoc b/docs/reference/sql/appendix/index.asciidoc similarity index 100% rename from x-pack/docs/en/sql/appendix/index.asciidoc rename to docs/reference/sql/appendix/index.asciidoc diff --git a/x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc b/docs/reference/sql/appendix/syntax-reserved.asciidoc similarity index 98% rename from x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc rename to docs/reference/sql/appendix/syntax-reserved.asciidoc index bbdefcbcb54aa..7a502d6eea939 100644 --- a/x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc +++ b/docs/reference/sql/appendix/syntax-reserved.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [appendix] [[sql-syntax-reserved]] = Reserved Keywords diff --git a/x-pack/docs/en/sql/concepts.asciidoc b/docs/reference/sql/concepts.asciidoc similarity index 99% rename from x-pack/docs/en/sql/concepts.asciidoc rename to docs/reference/sql/concepts.asciidoc index f5eab6f37baf8..1dc23e391fab1 100644 --- a/x-pack/docs/en/sql/concepts.asciidoc +++ b/docs/reference/sql/concepts.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-concepts]] == Conventions and Terminology diff --git a/x-pack/docs/en/sql/endpoints/cli.asciidoc b/docs/reference/sql/endpoints/cli.asciidoc similarity index 97% rename from x-pack/docs/en/sql/endpoints/cli.asciidoc rename to docs/reference/sql/endpoints/cli.asciidoc index e04fd96ab7198..0908c2344bb15 100644 --- a/x-pack/docs/en/sql/endpoints/cli.asciidoc +++ b/docs/reference/sql/endpoints/cli.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-cli]] == SQL CLI diff --git a/x-pack/docs/en/sql/endpoints/index.asciidoc b/docs/reference/sql/endpoints/index.asciidoc similarity index 100% rename from x-pack/docs/en/sql/endpoints/index.asciidoc rename to docs/reference/sql/endpoints/index.asciidoc diff --git a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc b/docs/reference/sql/endpoints/jdbc.asciidoc similarity index 99% rename from x-pack/docs/en/sql/endpoints/jdbc.asciidoc rename to docs/reference/sql/endpoints/jdbc.asciidoc index 84182f8b4a521..6a8793f7e24e2 100644 --- a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc +++ b/docs/reference/sql/endpoints/jdbc.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="platinum"] [[sql-jdbc]] == SQL JDBC diff --git a/x-pack/docs/en/sql/endpoints/rest.asciidoc b/docs/reference/sql/endpoints/rest.asciidoc similarity index 99% rename from x-pack/docs/en/sql/endpoints/rest.asciidoc rename to docs/reference/sql/endpoints/rest.asciidoc index fa5093f8de528..f33189303e682 100644 --- a/x-pack/docs/en/sql/endpoints/rest.asciidoc +++ b/docs/reference/sql/endpoints/rest.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-rest]] == SQL REST API diff --git a/x-pack/docs/en/sql/endpoints/translate.asciidoc b/docs/reference/sql/endpoints/translate.asciidoc similarity index 97% rename from x-pack/docs/en/sql/endpoints/translate.asciidoc rename to docs/reference/sql/endpoints/translate.asciidoc index be6a77a3caa44..db450b5f914c8 100644 --- 
a/x-pack/docs/en/sql/endpoints/translate.asciidoc +++ b/docs/reference/sql/endpoints/translate.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-translate]] == SQL Translate API diff --git a/x-pack/docs/en/sql/functions/index.asciidoc b/docs/reference/sql/functions/index.asciidoc similarity index 99% rename from x-pack/docs/en/sql/functions/index.asciidoc rename to docs/reference/sql/functions/index.asciidoc index dd68370dde3e7..93d201a182828 100644 --- a/x-pack/docs/en/sql/functions/index.asciidoc +++ b/docs/reference/sql/functions/index.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-functions]] == Functions and Operators diff --git a/x-pack/docs/en/sql/getting-started.asciidoc b/docs/reference/sql/getting-started.asciidoc similarity index 98% rename from x-pack/docs/en/sql/getting-started.asciidoc rename to docs/reference/sql/getting-started.asciidoc index 24f01910551bb..7d1bd33e8a035 100644 --- a/x-pack/docs/en/sql/getting-started.asciidoc +++ b/docs/reference/sql/getting-started.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-getting-started]] == Getting Started with SQL diff --git a/x-pack/docs/en/sql/index.asciidoc b/docs/reference/sql/index.asciidoc similarity index 98% rename from x-pack/docs/en/sql/index.asciidoc rename to docs/reference/sql/index.asciidoc index f96b83db08ad6..33b9da9fab93d 100644 --- a/x-pack/docs/en/sql/index.asciidoc +++ b/docs/reference/sql/index.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[xpack-sql]] = SQL Access diff --git a/x-pack/docs/en/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc similarity index 98% rename from x-pack/docs/en/sql/language/data-types.asciidoc rename to docs/reference/sql/language/data-types.asciidoc index 7e5f045aa6ce9..7f98add97248b 100644 --- a/x-pack/docs/en/sql/language/data-types.asciidoc +++ b/docs/reference/sql/language/data-types.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-data-types]] == Data Types diff --git a/x-pack/docs/en/sql/language/index.asciidoc b/docs/reference/sql/language/index.asciidoc similarity index 87% rename from x-pack/docs/en/sql/language/index.asciidoc rename to docs/reference/sql/language/index.asciidoc index fdf6f3e7950ca..6558e9ad92bf8 100644 --- a/x-pack/docs/en/sql/language/index.asciidoc +++ b/docs/reference/sql/language/index.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-spec]] == SQL Language diff --git a/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc b/docs/reference/sql/language/syntax/describe-table.asciidoc similarity index 87% rename from x-pack/docs/en/sql/language/syntax/describe-table.asciidoc rename to docs/reference/sql/language/syntax/describe-table.asciidoc index 114def470b181..dd2d27a5781d2 100644 --- a/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc +++ b/docs/reference/sql/language/syntax/describe-table.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-describe-table]] === DESCRIBE TABLE diff --git a/x-pack/docs/en/sql/language/syntax/index.asciidoc b/docs/reference/sql/language/syntax/index.asciidoc similarity index 94% rename from x-pack/docs/en/sql/language/syntax/index.asciidoc rename to docs/reference/sql/language/syntax/index.asciidoc index e0e970edae14b..4af8f19d7034b 100644 --- a/x-pack/docs/en/sql/language/syntax/index.asciidoc +++ b/docs/reference/sql/language/syntax/index.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-commands]] == SQL Commands diff --git 
a/x-pack/docs/en/sql/language/syntax/select.asciidoc b/docs/reference/sql/language/syntax/select.asciidoc similarity index 99% rename from x-pack/docs/en/sql/language/syntax/select.asciidoc rename to docs/reference/sql/language/syntax/select.asciidoc index 35c2bf0737db9..4a7c0534b68a3 100644 --- a/x-pack/docs/en/sql/language/syntax/select.asciidoc +++ b/docs/reference/sql/language/syntax/select.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-select]] === SELECT diff --git a/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc b/docs/reference/sql/language/syntax/show-columns.asciidoc similarity index 85% rename from x-pack/docs/en/sql/language/syntax/show-columns.asciidoc rename to docs/reference/sql/language/syntax/show-columns.asciidoc index 2e7c8f7bfca69..a52c744f17a97 100644 --- a/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc +++ b/docs/reference/sql/language/syntax/show-columns.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-show-columns]] === SHOW COLUMNS diff --git a/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc b/docs/reference/sql/language/syntax/show-functions.asciidoc similarity index 89% rename from x-pack/docs/en/sql/language/syntax/show-functions.asciidoc rename to docs/reference/sql/language/syntax/show-functions.asciidoc index 197b9e8cb3b79..964cdf39081c6 100644 --- a/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc +++ b/docs/reference/sql/language/syntax/show-functions.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-show-functions]] === SHOW FUNCTIONS diff --git a/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc b/docs/reference/sql/language/syntax/show-tables.asciidoc similarity index 89% rename from x-pack/docs/en/sql/language/syntax/show-tables.asciidoc rename to docs/reference/sql/language/syntax/show-tables.asciidoc index 9266b6d58058b..7772c39c6fc21 100644 --- a/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc +++ b/docs/reference/sql/language/syntax/show-tables.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-show-tables]] === SHOW TABLES diff --git a/x-pack/docs/en/sql/overview.asciidoc b/docs/reference/sql/overview.asciidoc similarity index 97% rename from x-pack/docs/en/sql/overview.asciidoc rename to docs/reference/sql/overview.asciidoc index 36eff69f6263d..a72f5ca61feb5 100644 --- a/x-pack/docs/en/sql/overview.asciidoc +++ b/docs/reference/sql/overview.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-overview]] == Overview diff --git a/x-pack/docs/en/sql/security.asciidoc b/docs/reference/sql/security.asciidoc similarity index 98% rename from x-pack/docs/en/sql/security.asciidoc rename to docs/reference/sql/security.asciidoc index bba73a2a4de6d..64f554f023195 100644 --- a/x-pack/docs/en/sql/security.asciidoc +++ b/docs/reference/sql/security.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-security]] == Security From 51151027cdee34d668802b90dba263b2f4bc8695 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 22 Jun 2018 17:42:58 -0400 Subject: [PATCH 84/92] TEST: Add bwc recovery tests with synced-flush index Although the master branch is not affected by #31482, it's helpful to have BWC tests that verify peer recovery with a synced-flush index. This commit adds the BWC tests from #31506 to the master branch.
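The tests added in this patch wrap the synced-flush request in assertBusy and retry it, because the global checkpoint sync fired for the last write operation holds a shard permit and can make a first synced-flush attempt fall short of flushing every copy. The sketch below shows that retry-until-deadline idea against plain JDK types only; the poll interval, time budget, and names are illustrative assumptions rather than part of this patch.

import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

class SyncedFlushRetrySketch {

    // Re-runs the check until it passes or the time budget is spent, mirroring the
    // assertBusy(...) loop the new tests place around the _flush/synced request.
    static void retryUntilSuccessful(BooleanSupplier allCopiesSyncedFlushed, long timeoutMillis) throws InterruptedException {
        final long deadline = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeoutMillis);
        while (allCopiesSyncedFlushed.getAsBoolean() == false) {
            if (System.nanoTime() > deadline) {
                throw new AssertionError("synced flush did not succeed on every shard copy in time");
            }
            Thread.sleep(100); // back off briefly before retrying
        }
    }
}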
Relates #31482 Relates #31506 --- .../upgrades/FullClusterRestartIT.java | 22 +++++++++++-- .../elasticsearch/upgrades/RecoveryIT.java | 32 +++++++++++++++++++ 2 files changed, 52 insertions(+), 2 deletions(-) diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 57c6ad7ff861f..081a1918674d0 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -24,7 +24,9 @@ import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Booleans; @@ -701,8 +703,24 @@ public void testRecovery() throws Exception { // make sure all recoveries are done ensureGreen(index); - // Explicitly flush so we're sure to have a bunch of documents in the Lucene index - client().performRequest("POST", "/_flush"); + // Recovering a synced-flush index from 5.x to 6.x might be subtle as a 5.x index commit does not have all 6.x commit tags. + if (randomBoolean()) { + // We have to spin synced-flush requests here because we fire the global checkpoint sync for the last write operation. + // A synced-flush request considers the global checkpoint sync as an going operation because it acquires a shard permit. + assertBusy(() -> { + try { + Response resp = client().performRequest(new Request("POST", index + "/_flush/synced")); + Map result = ObjectPath.createFromResponse(resp).evaluate("_shards"); + assertThat(result.get("successful"), equalTo(result.get("total"))); + assertThat(result.get("failed"), equalTo(0)); + } catch (ResponseException ex) { + throw new AssertionError(ex); // cause assert busy to retry + } + }); + } else { + // Explicitly flush so we're sure to have a bunch of documents in the Lucene index + assertOK(client().performRequest(new Request("POST", "/_flush"))); + } if (shouldHaveTranslog) { // Update a few documents so we are sure to have a translog indexRandomDocuments(count / 10, false /* Flushing here would invalidate the whole thing....*/, false, diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index 1351de16cf718..809cd40d698df 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -22,7 +22,9 @@ import org.apache.http.entity.StringEntity; import org.elasticsearch.Version; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -239,4 +241,34 @@ public void testRelocationWithConcurrentIndexing() throws Exception { } } + public void testRecoverSyncedFlushIndex() throws Exception { + final String index = "recover_synced_flush_index"; + if (CLUSTER_TYPE == 
ClusterType.OLD) { + Settings.Builder settings = Settings.builder() + .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) + .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1) + // if the node with the replica is the first to be restarted, while a replica is still recovering + // then delayed allocation will kick in. When the node comes back, the master will search for a copy + // but the recovering copy will be seen as invalid and the cluster health won't return to GREEN + // before timing out + .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms") + .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster + createIndex(index, settings.build()); + indexDocs(index, 0, randomInt(5)); + // We have to spin synced-flush requests here because we fire the global checkpoint sync for the last write operation. + // A synced-flush request considers the global checkpoint sync as an going operation because it acquires a shard permit. + assertBusy(() -> { + try { + Response resp = client().performRequest(new Request("POST", index + "/_flush/synced")); + Map result = ObjectPath.createFromResponse(resp).evaluate("_shards"); + assertThat(result.get("successful"), equalTo(result.get("total"))); + assertThat(result.get("failed"), equalTo(0)); + } catch (ResponseException ex) { + throw new AssertionError(ex); // cause assert busy to retry + } + }); + } + ensureGreen(index); + } + } From b6cc6fc2bc44fbde69617566d549829c466cccda Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 22 Jun 2018 18:09:37 -0700 Subject: [PATCH 85/92] [DOCS] Updates Watcher examples for code testing (#31152) --- x-pack/docs/build.gradle | 32 ------------------- x-pack/docs/en/watcher/actions/email.asciidoc | 7 ++-- .../docs/en/watcher/actions/hipchat.asciidoc | 4 ++- x-pack/docs/en/watcher/actions/index.asciidoc | 1 + x-pack/docs/en/watcher/actions/jira.asciidoc | 1 + .../docs/en/watcher/actions/logging.asciidoc | 1 + .../en/watcher/actions/pagerduty.asciidoc | 2 ++ x-pack/docs/en/watcher/actions/slack.asciidoc | 3 ++ .../docs/en/watcher/actions/webhook.asciidoc | 4 +++ .../docs/en/watcher/condition/always.asciidoc | 1 + .../watcher/condition/array-compare.asciidoc | 1 + .../en/watcher/condition/compare.asciidoc | 3 ++ .../docs/en/watcher/condition/never.asciidoc | 1 + .../docs/en/watcher/condition/script.asciidoc | 7 ++++ .../en/watcher/customizing-watches.asciidoc | 10 ++++-- .../example-watch-meetupdata.asciidoc | 24 ++++++++++++-- .../en/watcher/how-watcher-works.asciidoc | 7 ++++ x-pack/docs/en/watcher/input/chain.asciidoc | 2 ++ x-pack/docs/en/watcher/input/http.asciidoc | 6 ++++ x-pack/docs/en/watcher/input/search.asciidoc | 5 +++ x-pack/docs/en/watcher/input/simple.asciidoc | 2 ++ x-pack/docs/en/watcher/transform.asciidoc | 1 + .../docs/en/watcher/transform/chain.asciidoc | 1 + .../docs/en/watcher/transform/script.asciidoc | 1 + .../docs/en/watcher/transform/search.asciidoc | 4 +++ .../en/watcher/trigger/schedule/cron.asciidoc | 2 ++ .../watcher/trigger/schedule/daily.asciidoc | 4 +++ .../watcher/trigger/schedule/hourly.asciidoc | 2 ++ .../trigger/schedule/interval.asciidoc | 1 + .../watcher/trigger/schedule/monthly.asciidoc | 3 ++ .../watcher/trigger/schedule/weekly.asciidoc | 3 ++ .../watcher/trigger/schedule/yearly.asciidoc | 3 ++ .../docs/en/watcher/troubleshooting.asciidoc | 2 ++ 33 files changed, 110 insertions(+), 41 deletions(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 912c9965b4d9a..4176cdeb0b7d6 100644 --- a/x-pack/docs/build.gradle +++ 
b/x-pack/docs/build.gradle @@ -14,38 +14,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/security/authorization/run-as-privilege.asciidoc', 'en/security/ccs-clients-integrations/http.asciidoc', 'en/security/authorization/custom-roles-provider.asciidoc', - 'en/watcher/actions/email.asciidoc', - 'en/watcher/actions/hipchat.asciidoc', - 'en/watcher/actions/index.asciidoc', - 'en/watcher/actions/logging.asciidoc', - 'en/watcher/actions/pagerduty.asciidoc', - 'en/watcher/actions/slack.asciidoc', - 'en/watcher/actions/jira.asciidoc', - 'en/watcher/actions/webhook.asciidoc', - 'en/watcher/condition/always.asciidoc', - 'en/watcher/condition/array-compare.asciidoc', - 'en/watcher/condition/compare.asciidoc', - 'en/watcher/condition/never.asciidoc', - 'en/watcher/condition/script.asciidoc', - 'en/watcher/customizing-watches.asciidoc', - 'en/watcher/example-watches/example-watch-meetupdata.asciidoc', - 'en/watcher/how-watcher-works.asciidoc', - 'en/watcher/input/chain.asciidoc', - 'en/watcher/input/http.asciidoc', - 'en/watcher/input/search.asciidoc', - 'en/watcher/input/simple.asciidoc', - 'en/watcher/transform.asciidoc', - 'en/watcher/transform/chain.asciidoc', - 'en/watcher/transform/script.asciidoc', - 'en/watcher/transform/search.asciidoc', - 'en/watcher/trigger/schedule/cron.asciidoc', - 'en/watcher/trigger/schedule/daily.asciidoc', - 'en/watcher/trigger/schedule/hourly.asciidoc', - 'en/watcher/trigger/schedule/interval.asciidoc', - 'en/watcher/trigger/schedule/monthly.asciidoc', - 'en/watcher/trigger/schedule/weekly.asciidoc', - 'en/watcher/trigger/schedule/yearly.asciidoc', - 'en/watcher/troubleshooting.asciidoc', 'en/rest-api/ml/delete-snapshot.asciidoc', 'en/rest-api/ml/get-bucket.asciidoc', 'en/rest-api/ml/get-job-stats.asciidoc', diff --git a/x-pack/docs/en/watcher/actions/email.asciidoc b/x-pack/docs/en/watcher/actions/email.asciidoc index 5e0ee4c451ac6..0da028fcc7b1e 100644 --- a/x-pack/docs/en/watcher/actions/email.asciidoc +++ b/x-pack/docs/en/watcher/actions/email.asciidoc @@ -35,6 +35,7 @@ the watch payload in the email body: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action. <2> The action type is set to `email`. <3> One or more addresses to send the email to. Must be specified in the @@ -92,6 +93,7 @@ killed by firewalls or load balancers inbetween. } } -------------------------------------------------- +// NOTCONSOLE <1> The ID of the attachment, which is used as the file name in the email attachment. <2> The type of the attachment and its specific configuration. @@ -158,9 +160,8 @@ include::{kib-repo-dir}/reporting/watch-example.asciidoc[] include::{kib-repo-dir}/reporting/report-intervals.asciidoc[] -//TODO: RE-ADD LINK: -//For more information, see -//{kibana-ref}/automating-report-generation.html[Automating Report Generation]. +For more information, see +{kibana-ref}/automating-report-generation.html[Automating Report Generation]. [[email-action-attributes]] ==== Email Action Attributes diff --git a/x-pack/docs/en/watcher/actions/hipchat.asciidoc b/x-pack/docs/en/watcher/actions/hipchat.asciidoc index e92c84638dc19..da5b7558c4a58 100644 --- a/x-pack/docs/en/watcher/actions/hipchat.asciidoc +++ b/x-pack/docs/en/watcher/actions/hipchat.asciidoc @@ -37,6 +37,7 @@ attribute is the message itself: } } -------------------------------------------------- +// NOTCONSOLE <1> The name of a HipChat account configured in `elasticsearch.yml`. <2> The message you want to send to HipChat. 
@@ -66,6 +67,7 @@ For example, the following action is configured to send messages to the } } -------------------------------------------------- +// NOTCONSOLE To send messages with a HipChat account that uses the <> profile, you need to specify what room or rooms you want to send the message to. @@ -92,7 +94,7 @@ For example, the following action is configured to send messages to the } } -------------------------------------------------- - +// NOTCONSOLE [[hipchat-action-attributes]] ==== HipChat Action Attributes diff --git a/x-pack/docs/en/watcher/actions/index.asciidoc b/x-pack/docs/en/watcher/actions/index.asciidoc index dd8d76fe549f3..8a31b150f22cb 100644 --- a/x-pack/docs/en/watcher/actions/index.asciidoc +++ b/x-pack/docs/en/watcher/actions/index.asciidoc @@ -22,6 +22,7 @@ The following snippet shows a simple `index` action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action <2> An optional <> to restrict action execution <3> An optional <> to transform the payload and prepare the data that should be indexed diff --git a/x-pack/docs/en/watcher/actions/jira.asciidoc b/x-pack/docs/en/watcher/actions/jira.asciidoc index 4d35fd5163702..dc1afdc93b342 100644 --- a/x-pack/docs/en/watcher/actions/jira.asciidoc +++ b/x-pack/docs/en/watcher/actions/jira.asciidoc @@ -40,6 +40,7 @@ The following snippet shows a simple jira action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The name of a Jira account configured in `elasticsearch.yml`. <2> The key of the Jira project in which the issue will be created. <3> The name of the issue type. diff --git a/x-pack/docs/en/watcher/actions/logging.asciidoc b/x-pack/docs/en/watcher/actions/logging.asciidoc index aa747028f7fa2..a8a4454c377eb 100644 --- a/x-pack/docs/en/watcher/actions/logging.asciidoc +++ b/x-pack/docs/en/watcher/actions/logging.asciidoc @@ -25,6 +25,7 @@ The following snippet shows a simple logging action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action. <2> An optional <> to transform the payload before executing the `logging` action. diff --git a/x-pack/docs/en/watcher/actions/pagerduty.asciidoc b/x-pack/docs/en/watcher/actions/pagerduty.asciidoc index 1a673435a7ce6..1b93a0f219c6d 100644 --- a/x-pack/docs/en/watcher/actions/pagerduty.asciidoc +++ b/x-pack/docs/en/watcher/actions/pagerduty.asciidoc @@ -25,6 +25,7 @@ The following snippet shows a simple PagerDuty action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> Description of the message @@ -59,6 +60,7 @@ payload as well as an array of contexts to the action. } } -------------------------------------------------- +// NOTCONSOLE [[pagerduty-action-attributes]] diff --git a/x-pack/docs/en/watcher/actions/slack.asciidoc b/x-pack/docs/en/watcher/actions/slack.asciidoc index 3dc3c2c04ca73..0753f333dc733 100644 --- a/x-pack/docs/en/watcher/actions/slack.asciidoc +++ b/x-pack/docs/en/watcher/actions/slack.asciidoc @@ -29,6 +29,7 @@ The following snippet shows a simple slack action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The channels and users you want to send the message to. <2> The content of the message. 
@@ -66,6 +67,7 @@ The following snippet shows a standard message attachment: } } -------------------------------------------------- +// NOTCONSOLE [[slack-dynamic-attachment]] @@ -131,6 +133,7 @@ aggregation and the Slack action: } } -------------------------------------------------- +// NOTCONSOLE <1> The list generated by the action's transform. <2> The parameter placeholders refer to attributes in each item of the list generated by the transform. diff --git a/x-pack/docs/en/watcher/actions/webhook.asciidoc b/x-pack/docs/en/watcher/actions/webhook.asciidoc index 806777a406c6f..aabfb17f3b6e5 100644 --- a/x-pack/docs/en/watcher/actions/webhook.asciidoc +++ b/x-pack/docs/en/watcher/actions/webhook.asciidoc @@ -30,6 +30,7 @@ The following snippet shows a simple webhook action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action <2> An optional <> to transform the payload before executing the `webhook` action @@ -65,6 +66,7 @@ For example, the following `webhook` action creates a new issue in GitHub: } } -------------------------------------------------- +// NOTCONSOLE <1> The username and password for the user creating the issue NOTE: By default, both the username and the password are stored in the `.watches` @@ -101,6 +103,7 @@ the values serve as the parameter values: } } -------------------------------------------------- +// NOTCONSOLE <1> The parameter values can contain templated strings. @@ -128,6 +131,7 @@ the values serve as the header values: } } -------------------------------------------------- +// NOTCONSOLE <1> The header values can contain templated strings. diff --git a/x-pack/docs/en/watcher/condition/always.asciidoc b/x-pack/docs/en/watcher/condition/always.asciidoc index 22203018c926e..c2eb37be52c8f 100644 --- a/x-pack/docs/en/watcher/condition/always.asciidoc +++ b/x-pack/docs/en/watcher/condition/always.asciidoc @@ -22,3 +22,4 @@ object: "always" : {} } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/condition/array-compare.asciidoc b/x-pack/docs/en/watcher/condition/array-compare.asciidoc index 48b073e202c34..b413690865e60 100644 --- a/x-pack/docs/en/watcher/condition/array-compare.asciidoc +++ b/x-pack/docs/en/watcher/condition/array-compare.asciidoc @@ -34,6 +34,7 @@ than or equal to 25: } } -------------------------------------------------- +// NOTCONSOLE <1> The path to the array in the execution context that you want to evaluate, specified in dot notation. <2> The path to the field in each array element that you want to evaluate. diff --git a/x-pack/docs/en/watcher/condition/compare.asciidoc b/x-pack/docs/en/watcher/condition/compare.asciidoc index fc30a44bafe49..d58638e6fe472 100644 --- a/x-pack/docs/en/watcher/condition/compare.asciidoc +++ b/x-pack/docs/en/watcher/condition/compare.asciidoc @@ -49,6 +49,7 @@ search result>> is greater than or equal to 5: } } -------------------------------------------------- +// NOTCONSOLE <1> Use dot notation to reference a value in the execution context. <2> Specify a comparison operator and the value you want to compare against. @@ -68,6 +69,7 @@ of the form `<{expression}>`. For example, the following expression returns } } -------------------------------------------------- +// NOTCONSOLE You can also compare two values in the execution context by specifying the compared value as a path of the form of `{{path}}`. 
For example, the following @@ -85,6 +87,7 @@ to the `ctx.payload.aggregations.handled.buckets.true.doc_count`: } } -------------------------------------------------- +// NOTCONSOLE ==== Accessing Values in the Execution Context diff --git a/x-pack/docs/en/watcher/condition/never.asciidoc b/x-pack/docs/en/watcher/condition/never.asciidoc index d3d5cf39a4465..b8cad0b8c04d5 100644 --- a/x-pack/docs/en/watcher/condition/never.asciidoc +++ b/x-pack/docs/en/watcher/condition/never.asciidoc @@ -17,3 +17,4 @@ you specify the condition type and associate it with an empty object: "never" : {} } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/condition/script.asciidoc b/x-pack/docs/en/watcher/condition/script.asciidoc index 98cdd974f0ce4..ee6a9531bf7b3 100644 --- a/x-pack/docs/en/watcher/condition/script.asciidoc +++ b/x-pack/docs/en/watcher/condition/script.asciidoc @@ -19,6 +19,7 @@ The following snippet configures an inline `script` condition that always return "script" : "return true" } -------------------------------------------------- +// NOTCONSOLE This example defines a script as a simple string. This format is actually a shortcut for defining an <> script. The @@ -43,6 +44,7 @@ parameter, `result`: } } -------------------------------------------------- +// NOTCONSOLE [[condition-script-inline]] ==== Inline Scripts @@ -59,6 +61,7 @@ always returns `true`. } } -------------------------------------------------- +// NOTCONSOLE [[condition-script-stored]] ==== Stored Scripts @@ -74,6 +77,7 @@ in Elasticsearch. The following snippet shows how to refer to a script by its `i } } -------------------------------------------------- +// NOTCONSOLE As with <> scripts, you can also specify the script language and parameters: @@ -88,6 +92,7 @@ scripts, you can also specify the script language and parameters: } } -------------------------------------------------- +// NOTCONSOLE [[accessing-watch-payload]] ==== Accessing the Watch Payload @@ -121,6 +126,7 @@ threshold: } } -------------------------------------------------- +// NOTCONSOLE When you're using a scripted condition to evaluate an Elasticsearch response, keep in mind that the fields in the response are no longer in their native data @@ -132,6 +138,7 @@ you need to parse the `@timestamp` string into a `DateTime`. For example: -------------------------------------------------- org.elasticsearch.common.joda.time.DateTime.parse(@timestamp) -------------------------------------------------- +// NOTCONSOLE You can reference the following variables in the watch context: diff --git a/x-pack/docs/en/watcher/customizing-watches.asciidoc b/x-pack/docs/en/watcher/customizing-watches.asciidoc index 66204a6d0f5b9..fc45bc636bfc5 100644 --- a/x-pack/docs/en/watcher/customizing-watches.asciidoc +++ b/x-pack/docs/en/watcher/customizing-watches.asciidoc @@ -36,6 +36,7 @@ fields in the payload: } } ------------------------------------- +// NOTCONSOLE See <> for more details. @@ -74,6 +75,7 @@ For example, the following `search` input loads the latest VIX quote: } } -------------------------------------------------- +// NOTCONSOLE <1> Will resolve to today's daily quotes index See <> for more details. @@ -105,7 +107,7 @@ Amsterdam using http://openweathermap.org/appid[OpenWeatherMap] online service: } } -------------------------------------------------- - +// NOTCONSOLE See <> for more details. 
[[chaining-inputs]] @@ -146,7 +148,7 @@ returned any hits: "compare" : { "ctx.payload.hits.total" : { "gt" : 0 }} }, -------------------------------------------------- - +// NOTCONSOLE See <> for more details. ==== Powerful Comparison Logic with the Script Condition @@ -176,7 +178,7 @@ VIX quote loaded by the `http` input is either greater than 5% or lower than -5% } } -------------------------------------------------- - +// NOTCONSOLE See <> for more details. [[using-transforms]] @@ -231,6 +233,7 @@ attaches the payload data to the message: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action <2> The action type, in this case it's an `email` action @@ -261,6 +264,7 @@ creates a new issue in GitHub } } -------------------------------------------------- +// NOTCONSOLE <1> `` is the owner of the GitHub repo and `` is the name of the repo. <2> The username that creates the issue <3> The password of that user diff --git a/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc b/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc index 041a8ec81a7e3..d933a38d7d670 100644 --- a/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc +++ b/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc @@ -13,6 +13,7 @@ To ingest this data with Logstash: . Create a Logstash configuration file that uses the {logstash-ref}/plugins-inputs-stdin.html[Logstash standard input] and the {logstash-ref}/plugins-outputs-stdout.html[Logstash standard output] and save it in `logstash-{version}` directory as `livestream.conf`: + +-- [source,ruby] ---------------------------------------------------------- input { @@ -38,16 +39,20 @@ output { <2> } ---------------------------------------------------------- +// NOTCONSOLE <1> The meetup data stream is formatted in JSON. <2> Index the meetup data into Elasticsearch. +-- . To start indexing the meetup data, pipe the RSVP stream into Logstash and specify your `livestream.conf` configuration file. + -[source,she] +-- +[source,shell] ---------------------------------------------------------- - curl http://stream.meetup.com/2/rsvps | bin/logstash -f livestream.conf --------------------------------------------------------- +// NOTCONSOLE +-- Now that you're indexing the meetup RSVPs, you can set up a watch that lets you know about events you might be interested in. For example, let's create a watch that runs every hour, looks for events that talk about about _Open Source_, and sends an email with information about the events. @@ -56,6 +61,7 @@ To set up the watch: . Specify how often you want to run the watch by adding a schedule trigger to the watch: + +-- [source,js] -------------------------------------------------- { @@ -65,8 +71,11 @@ To set up the watch: } }, -------------------------------------------------- +// NOTCONSOLE +-- . Load data into the watch payload by creating an input that searches the meetup data for events that have _Open Source_ as a topic. You can use aggregations to group the data by city, consolidate references to the same events, and sort the events by date. + +-- [source,js] ------------------------------------------------- "input": { @@ -135,19 +144,28 @@ To set up the watch: } }, -------------------------------------------------- +// NOTCONSOLE <1> Elasticsearch Date math is used to select the Logstash indices that contain the meetup data. The second pattern is needed in case the previous hour crosses days. 
<2> Find all of the RSVPs with `Open Source` as a topic. <3> Group the RSVPs by city. <4> Consolidate multiple RSVPs for the same event. <5> Sort the events so the latest events are listed first. <6> Group the events by name. +-- . To determine whether or not there are any Open Source events, add a compare condition that checks the watch payload to see if there were any search hits. ++ +-- [source,js] -------------------------------------------------- "compare" : { "ctx.payload.hits.total" : { "gt" : 0 }} -------------------------------------------------- +// NOTCONSOLE +-- + . To send an email when _Open Source_ events are found, add an email action: ++ +-- [source,js] -------------------------------------------------- "actions": { @@ -167,6 +185,8 @@ To set up the watch: } } --------------------------------------------------- +// NOTCONSOLE +-- NOTE: To enable Watcher to send emails, you must configure an email account in `elasticsearch.yml`. For more information, see <>. diff --git a/x-pack/docs/en/watcher/how-watcher-works.asciidoc b/x-pack/docs/en/watcher/how-watcher-works.asciidoc index b47b83dbf1ede..2bd19c1a41e02 100644 --- a/x-pack/docs/en/watcher/how-watcher-works.asciidoc +++ b/x-pack/docs/en/watcher/how-watcher-works.asciidoc @@ -283,6 +283,7 @@ The following snippet shows the basic structure of the _Watch Execution Context_ "vars" : { ... } <6> } ---------------------------------------------------------------------- +// NOTCONSOLE <1> Any static metadata specified in the watch definition. <2> The current watch payload. <3> The id of the executing watch. @@ -348,6 +349,7 @@ in sent emails: } } ---------------------------------------------------------------------- +// NOTCONSOLE [float] [[inline-templates-scripts]] @@ -369,6 +371,7 @@ the context metadata. } } ---------------------------------------------------------------------- +// NOTCONSOLE For a script, you simply specify the inline script as the value of the `script` field. For example: @@ -379,6 +382,7 @@ field. For example: "script" : "return true" } ---------------------------------------------------------------------- +// NOTCONSOLE You can also explicitly specify the inline type by using a formal object definition as the field value. For example: @@ -395,6 +399,7 @@ definition as the field value. For example: } } ---------------------------------------------------------------------- +// NOTCONSOLE The formal object definition for a script would be: @@ -406,6 +411,7 @@ The formal object definition for a script would be: } } ---------------------------------------------------------------------- +// NOTCONSOLE [float] [[stored-templates-scripts]] @@ -436,3 +442,4 @@ references the `email_notification_subject` template: } } ---------------------------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/input/chain.asciidoc b/x-pack/docs/en/watcher/input/chain.asciidoc index 1984b60d45e20..9952773e7227a 100644 --- a/x-pack/docs/en/watcher/input/chain.asciidoc +++ b/x-pack/docs/en/watcher/input/chain.asciidoc @@ -38,6 +38,7 @@ path set by a `simple` input: } } -------------------------------------------------- +// NOTCONSOLE <1> The inputs in a chain are specified as an array to guarantee the order in which the inputs are processed. (JSON does not guarantee the order of arbitrary objects.) @@ -90,3 +91,4 @@ still be available in its original form in `ctx.payload.first`. 
} } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/input/http.asciidoc b/x-pack/docs/en/watcher/input/http.asciidoc index 451903777d159..79d37d14a1bf4 100644 --- a/x-pack/docs/en/watcher/input/http.asciidoc +++ b/x-pack/docs/en/watcher/input/http.asciidoc @@ -40,6 +40,7 @@ index: } } -------------------------------------------------- +// NOTCONSOLE You can use the full Elasticsearch {ref}/query-dsl.html[Query DSL] to perform more sophisticated searches. For example, the following `http` input retrieves @@ -58,6 +59,7 @@ all documents that contain `event` in the `category` field: } } -------------------------------------------------- +// NOTCONSOLE ==== Calling Elasticsearch APIs @@ -82,6 +84,7 @@ Stats] API and enables the `human` attribute: } } -------------------------------------------------- +// NOTCONSOLE <1> Enabling this attribute returns the `bytes` values in the response in human readable format. @@ -110,6 +113,7 @@ a username and password to access `myservice`: } } -------------------------------------------------- +// NOTCONSOLE You can also pass in service-specific API keys and other information through the `params` attribute. For example, the following `http` @@ -131,6 +135,7 @@ http://openweathermap.org/appid[OpenWeatherMap] service: } } -------------------------------------------------- +// NOTCONSOLE ==== Using Templates @@ -153,6 +158,7 @@ and restrict the results to documents added within the last five minutes: } } -------------------------------------------------- +// NOTCONSOLE ==== Accessing the HTTP Response diff --git a/x-pack/docs/en/watcher/input/search.asciidoc b/x-pack/docs/en/watcher/input/search.asciidoc index a9782c482bd37..7ce67bfc1dc2b 100644 --- a/x-pack/docs/en/watcher/input/search.asciidoc +++ b/x-pack/docs/en/watcher/input/search.asciidoc @@ -32,6 +32,7 @@ documents from the `logs` index: } } -------------------------------------------------- +// NOTCONSOLE You can use date math and wildcards when specifying indices. For example, the following input loads the latest VIXZ quote from today's daily quotes index: @@ -57,6 +58,7 @@ the following input loads the latest VIXZ quote from today's daily quotes index: } } -------------------------------------------------- +// NOTCONSOLE ==== Extracting Specific Fields @@ -78,6 +80,7 @@ watch payload: } }, -------------------------------------------------- +// NOTCONSOLE ==== Using Templates @@ -105,6 +108,7 @@ parameter: ... } -------------------------------------------------- +// NOTCONSOLE ==== Applying Conditions @@ -131,6 +135,7 @@ check if the search returned more than five hits: ... 
} -------------------------------------------------- +// NOTCONSOLE ==== Accessing the Search Results diff --git a/x-pack/docs/en/watcher/input/simple.asciidoc b/x-pack/docs/en/watcher/input/simple.asciidoc index 3b7b4c5734c57..c756a4e5403e2 100644 --- a/x-pack/docs/en/watcher/input/simple.asciidoc +++ b/x-pack/docs/en/watcher/input/simple.asciidoc @@ -20,6 +20,7 @@ an object (`obj`): } } -------------------------------------------------- +// NOTCONSOLE For example, the following watch uses the `simple` input to set the recipient name for a daily reminder email: @@ -48,3 +49,4 @@ name for a daily reminder email: } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/transform.asciidoc b/x-pack/docs/en/watcher/transform.asciidoc index 0351c9b8c1214..8241d7b0cb442 100644 --- a/x-pack/docs/en/watcher/transform.asciidoc +++ b/x-pack/docs/en/watcher/transform.asciidoc @@ -52,6 +52,7 @@ part of the definition of the `my_webhook` action. ] } -------------------------------------------------- +// NOTCONSOLE <1> A watch level `transform` <2> An action level `transform` diff --git a/x-pack/docs/en/watcher/transform/chain.asciidoc b/x-pack/docs/en/watcher/transform/chain.asciidoc index f17b05c71b4cc..9ad27fe48ed81 100644 --- a/x-pack/docs/en/watcher/transform/chain.asciidoc +++ b/x-pack/docs/en/watcher/transform/chain.asciidoc @@ -33,6 +33,7 @@ following snippet: ] } -------------------------------------------------- +// NOTCONSOLE <1> The `chain` transform definition <2> The first transform in the chain (in this case, a `search` transform) <3> The second and final transform in the chain (in this case, a `script` diff --git a/x-pack/docs/en/watcher/transform/script.asciidoc b/x-pack/docs/en/watcher/transform/script.asciidoc index 0a3bd401dc744..f1a46d482d9e6 100644 --- a/x-pack/docs/en/watcher/transform/script.asciidoc +++ b/x-pack/docs/en/watcher/transform/script.asciidoc @@ -20,6 +20,7 @@ TIP: The `script` transform is often useful when used in combination with the } } -------------------------------------------------- +// NOTCONSOLE <1> A simple `painless` script that creates a new payload with a single `time` field holding the scheduled time. diff --git a/x-pack/docs/en/watcher/transform/search.asciidoc b/x-pack/docs/en/watcher/transform/search.asciidoc index eaf7c80c6cbb3..56f9304d986ce 100644 --- a/x-pack/docs/en/watcher/transform/search.asciidoc +++ b/x-pack/docs/en/watcher/transform/search.asciidoc @@ -18,6 +18,7 @@ defined on the watch level: } } -------------------------------------------------- +// NOTCONSOLE Like every other search based construct, one can make use of the full search API supported by Elasticsearch. For example, the following search transform @@ -41,6 +42,7 @@ execute a search over all events indices, matching events with `error` priority: } } -------------------------------------------------- +// NOTCONSOLE The following table lists all available settings for the search transform: @@ -129,6 +131,7 @@ time of the watch: } } -------------------------------------------------- +// NOTCONSOLE The model of the template is a union between the provided `template.params` settings and the <>. 
@@ -173,3 +176,4 @@ The following is an example of using templates that refer to provided parameters } } -------------------------------------------------- +// NOTCONSOLE diff --git a/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc index 57d330510971d..57a6ebdfd92ef 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc @@ -184,6 +184,7 @@ that triggers every day at noon: ... } -------------------------------------------------- +// NOTCONSOLE ===== Configuring a Multiple Times Cron Schedule @@ -207,6 +208,7 @@ minute during the weekend: ... } -------------------------------------------------- +// NOTCONSOLE [[croneval]] ===== Verifying Cron Expressions diff --git a/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc index e3165695e6aa8..e729335d59b29 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc @@ -28,6 +28,7 @@ day at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE ===== Configuring a Multiple Times Daily Schedule @@ -45,6 +46,7 @@ triggers at `00:00`, `12:00`, and `17:00` every day. } } -------------------------------------------------- +// NOTCONSOLE [[specifying-times-using-objects]] ===== Specifying Times Using Objects @@ -69,6 +71,7 @@ For example, the following `daily` schedule triggers once every day at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE To specify multiple times using the object notation, you specify multiple hours or minutes as an array. For example, following `daily` schedule triggers at @@ -89,3 +92,4 @@ or minutes as an array. 
For example, following `daily` schedule triggers at } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc index 48cc9dc2aa4a8..9ec750eebcd2b 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc @@ -28,6 +28,7 @@ For example, the following `hourly` schedule triggers at minute 30 every hour-- } } -------------------------------------------------- +// NOTCONSOLE ===== Configuring a Multiple Times Hourly Schedule @@ -46,3 +47,4 @@ triggers every 15 minutes every hour--`12:00`, `12:15`, `12:30`, `12:45`, } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc index b65c16646e176..e534181ec0c2f 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc @@ -34,3 +34,4 @@ For example, the following `interval` schedule triggers every five minutes: } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc index e6bf292d91811..d2cfe409992a7 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc @@ -26,6 +26,7 @@ on the 10th of each month at noon: } } -------------------------------------------------- +// NOTCONSOLE NOTE: You can also specify the day and time with the `day` and `time` attributes, they are interchangeable with `on` and `at`. @@ -50,6 +51,7 @@ schedule triggers at 12:00 PM on the 10th of each month and at 5:00 PM on the } } -------------------------------------------------- +// NOTCONSOLE Alternatively, you can specify days and times in an object that has `on` and `at` attributes that contain an array of values. For example, the following `monthly` @@ -68,3 +70,4 @@ schedule triggers at 12:00 AM and 12:00 PM on the 10th and 20th of each month. } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc index a5ac52d0e0d01..d6a403cb125c6 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc @@ -32,6 +32,7 @@ triggers once a week on Friday at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE NOTE: You can also specify the day and time with the `day` and `time` attributes, they are interchangeable with `on` and `at`. @@ -55,6 +56,7 @@ schedule triggers every Tuesday at 12:00 PM and every Friday at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE Alternatively, you can specify days and times in an object that has `on` and `minute` attributes that contain an array of values. 
For example, the following @@ -73,3 +75,4 @@ Alternatively, you can specify days and times in an object that has `on` and } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc index 9ea9e1d1b47bc..d11cc5d072787 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc @@ -37,6 +37,7 @@ example, the following `yearly` schedule triggers once a year at noon on January } } -------------------------------------------------- +// NOTCONSOLE NOTE: You can also specify the month, day, and time with the `month`, `day`, and `time` attributes, they are interchangeable with `in`, `on`, and `at`. @@ -61,6 +62,7 @@ on July 20th. } } -------------------------------------------------- +// NOTCONSOLE Alternatively, you can specify the months, days, and times in an object that has `in`, `on`, and `minute` attributes that contain an array of values. For example, @@ -81,3 +83,4 @@ January 20th, December 10th, and December 20th. } } -------------------------------------------------- +// NOTCONSOLE diff --git a/x-pack/docs/en/watcher/troubleshooting.asciidoc b/x-pack/docs/en/watcher/troubleshooting.asciidoc index 8b793142ecc2b..20d599f8f5215 100644 --- a/x-pack/docs/en/watcher/troubleshooting.asciidoc +++ b/x-pack/docs/en/watcher/troubleshooting.asciidoc @@ -30,6 +30,8 @@ mappings: -------------------------------------------------- DELETE .watches -------------------------------------------------- +// CONSOLE +// TEST[skip:index deletion] + . Disable direct access to the `.watches` index: .. Stop the Elasticsearch node. From b7ef75fed6d75e1a88b000e36ee78a018c0c53d0 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Sat, 23 Jun 2018 09:39:17 +0200 Subject: [PATCH 86/92] Add get field mappings to High Level REST API Client (#31423) Add get field mappings to High Level REST API Client Relates to #27205 --- .../elasticsearch/client/IndicesClient.java | 31 +++++ .../client/RequestConverters.java | 20 +++ .../elasticsearch/client/IndicesClientIT.java | 38 ++++++ .../client/RequestConvertersTests.java | 64 ++++++++- .../IndicesClientDocumentationIT.java | 106 +++++++++++++++ .../indices/get_field_mappings.asciidoc | 86 ++++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../mapping/get/GetFieldMappingsResponse.java | 123 +++++++++++++++++- .../get/GetFieldMappingsResponseTests.java | 100 +++++++++++++- 9 files changed, 562 insertions(+), 8 deletions(-) create mode 100644 docs/java-rest/high-level/indices/get_field_mappings.asciidoc diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 30a42eb333f4a..28a9cc2036673 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -188,6 +190,35 @@ public void getMappingsAsync(GetMappingsRequest getMappingsRequest, RequestOptio GetMappingsResponse::fromXContent, listener, emptySet()); } + /** + * Retrieves the field mappings on an index or indices using the Get Field Mapping API. + * See + * Get Field Mapping API on elastic.co + * @param getFieldMappingsRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public GetFieldMappingsResponse getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest, + RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(getFieldMappingsRequest, RequestConverters::getFieldMapping, options, + GetFieldMappingsResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously retrieves the field mappings on an index or indices using the Get Field Mapping API. + * See + * Get Field Mapping API on elastic.co + * @param getFieldMappingsRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void getFieldMappingAsync(GetFieldMappingsRequest getFieldMappingsRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(getFieldMappingsRequest, RequestConverters::getFieldMapping, options, + GetFieldMappingsResponse::fromXContent, listener, emptySet()); + } + /** * Updates aliases using the Index Aliases API. * See diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 3d5d275732a6c..63a0e0e98377e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -50,6 +50,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; @@ -230,6 +231,25 @@ static Request getMappings(GetMappingsRequest getMappingsRequest) throws IOExcep return request; } + static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) throws IOException { + String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices(); + String[] types = getFieldMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.types(); + String[] fields = getFieldMappingsRequest.fields() == null ?
Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); + + String endpoint = new EndpointBuilder().addCommaSeparatedPathParts(indices) + .addPathPartAsIs("_mapping").addCommaSeparatedPathParts(types) + .addPathPartAsIs("field").addCommaSeparatedPathParts(fields) + .build(); + + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + Params parameters = new Params(request); + parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); + parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); + parameters.withLocal(getFieldMappingsRequest.local()); + return request; + } + static Request refresh(RefreshRequest refreshRequest) { String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_refresh")); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index c226b5349267c..5f8e6b5d36526 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -43,6 +43,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -74,6 +76,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -378,6 +381,41 @@ public void testGetMapping() throws IOException { assertThat(mappings, equalTo(expected)); } + public void testGetFieldMapping() throws IOException { + String indexName = "test"; + createIndex(indexName, Settings.EMPTY); + + PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); + putMappingRequest.type("_doc"); + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + mappingBuilder.startObject().startObject("properties").startObject("field"); + mappingBuilder.field("type", "text"); + mappingBuilder.endObject().endObject().endObject(); + putMappingRequest.source(mappingBuilder); + + PutMappingResponse putMappingResponse = + execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync); + assertTrue(putMappingResponse.isAcknowledged()); + + GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest() + .indices(indexName) + .types("_doc") + .fields("field"); + + GetFieldMappingsResponse getFieldMappingsResponse = + execute(getFieldMappingsRequest, + highLevelClient().indices()::getFieldMapping, + highLevelClient().indices()::getFieldMappingAsync); + + final Map fieldMappingMap = + 
getFieldMappingsResponse.mappings().get(indexName).get("_doc"); + + final GetFieldMappingsResponse.FieldMappingMetaData metaData = + new GetFieldMappingsResponse.FieldMappingMetaData("field", + new BytesArray("{\"field\":{\"type\":\"text\"}}")); + assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metaData))); + } + public void testDeleteIndex() throws IOException { { // Delete index if exists diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index e8bbbf6f5fd0c..b8714967b412a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; @@ -457,6 +458,61 @@ public void testGetMapping() throws IOException { assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); } + public void testGetFieldMapping() throws IOException { + GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest(); + + String[] indices = Strings.EMPTY_ARRAY; + if (randomBoolean()) { + indices = randomIndicesNames(0, 5); + getFieldMappingsRequest.indices(indices); + } else if (randomBoolean()) { + getFieldMappingsRequest.indices((String[]) null); + } + + String type = null; + if (randomBoolean()) { + type = randomAlphaOfLengthBetween(3, 10); + getFieldMappingsRequest.types(type); + } else if (randomBoolean()) { + getFieldMappingsRequest.types((String[]) null); + } + + String[] fields = null; + if (randomBoolean()) { + fields = new String[randomIntBetween(1, 5)]; + for (int i = 0; i < fields.length; i++) { + fields[i] = randomAlphaOfLengthBetween(3, 10); + } + getFieldMappingsRequest.fields(fields); + } else if (randomBoolean()) { + getFieldMappingsRequest.fields((String[]) null); + } + + Map expectedParams = new HashMap<>(); + + setRandomIndicesOptions(getFieldMappingsRequest::indicesOptions, getFieldMappingsRequest::indicesOptions, expectedParams); + setRandomLocal(getFieldMappingsRequest::local, expectedParams); + + Request request = RequestConverters.getFieldMapping(getFieldMappingsRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String index = String.join(",", indices); + if (Strings.hasLength(index)) { + endpoint.add(index); + } + endpoint.add("_mapping"); + if (type != null) { + endpoint.add(type); + } + endpoint.add("field"); + if (fields != null) { + endpoint.add(String.join(",", fields)); + } + assertThat(endpoint.toString(), equalTo(request.getEndpoint())); + + assertThat(expectedParams, equalTo(request.getParameters())); + assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + } + public void testDeleteIndex() { String[] indices = randomIndicesNames(0, 5); DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indices); @@ -2268,16 +2324,20 @@ private static void setRandomHumanReadable(GetIndexRequest request, Map request, 
Map expectedParams) { + private static void setRandomLocal(Consumer setter, Map expectedParams) { if (randomBoolean()) { boolean local = randomBoolean(); - request.local(local); + setter.accept(local); if (local) { expectedParams.put("local", String.valueOf(local)); } } } + private static void setRandomLocal(MasterNodeReadRequest request, Map expectedParams) { + setRandomLocal(request::local, expectedParams); + } + private static void setRandomTimeout(Consumer setter, TimeValue defaultTimeout, Map expectedParams) { if (randomBoolean()) { String timeout = randomTimeValue(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 9cc28152d03e3..95fa7f7185b5b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -41,6 +41,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -703,6 +705,110 @@ public void onFailure(Exception e) { } } + public void testGetFieldMapping() throws IOException, InterruptedException { + RestHighLevelClient client = highLevelClient(); + + { + CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter"), RequestOptions.DEFAULT); + assertTrue(createIndexResponse.isAcknowledged()); + PutMappingRequest request = new PutMappingRequest("twitter"); + request.type("tweet"); + request.source( + "{\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"timestamp\": {\n" + + " \"type\": \"date\"\n" + + " }\n" + + " }\n" + + "}", // <1> + XContentType.JSON); + PutMappingResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); + assertTrue(putMappingResponse.isAcknowledged()); + } + + // tag::get-field-mapping-request + GetFieldMappingsRequest request = new GetFieldMappingsRequest(); // <1> + request.indices("twitter"); // <2> + request.types("tweet"); // <3> + request.fields("message", "timestamp"); // <4> + // end::get-field-mapping-request + + // tag::get-field-mapping-request-indicesOptions + request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1> + // end::get-field-mapping-request-indicesOptions + + // tag::get-field-mapping-request-local + request.local(true); // <1> + // end::get-field-mapping-request-local + + { + + // tag::get-field-mapping-execute + GetFieldMappingsResponse response = + client.indices().getFieldMapping(request, RequestOptions.DEFAULT); + // end::get-field-mapping-execute + + // tag::get-field-mapping-response + final Map>> mappings = + response.mappings();// <1> + final Map typeMappings = + mappings.get("twitter").get("tweet"); // <2> + final GetFieldMappingsResponse.FieldMappingMetaData 
metaData = + typeMappings.get("message");// <3> + + final String fullName = metaData.fullName();// <4> + final Map source = metaData.sourceAsMap(); // <5> + // end::get-field-mapping-response + } + + { + // tag::get-field-mapping-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetFieldMappingsResponse putMappingResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-field-mapping-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + final ActionListener latchListener = new LatchedActionListener<>(listener, latch); + listener = ActionListener.wrap(r -> { + final Map>> mappings = + r.mappings(); + final Map typeMappings = + mappings.get("twitter").get("tweet"); + final GetFieldMappingsResponse.FieldMappingMetaData metaData1 = typeMappings.get("message"); + + final String fullName = metaData1.fullName(); + final Map source = metaData1.sourceAsMap(); + latchListener.onResponse(r); + }, e -> { + latchListener.onFailure(e); + fail("should not fail"); + }); + + // tag::get-field-mapping-execute-async + client.indices().getFieldMappingAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::get-field-mapping-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + + } + + public void testOpenIndex() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/docs/java-rest/high-level/indices/get_field_mappings.asciidoc b/docs/java-rest/high-level/indices/get_field_mappings.asciidoc new file mode 100644 index 0000000000000..3f5ff5aec6449 --- /dev/null +++ b/docs/java-rest/high-level/indices/get_field_mappings.asciidoc @@ -0,0 +1,86 @@ +[[java-rest-high-get-field-mappings]] +=== Get Field Mappings API + +[[java-rest-high-get-field-mappings-request]] +==== Get Field Mappings Request + +A `GetFieldMappingsRequest` can have an optional list of indices, an optional list of types, and a list of fields: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-request] +-------------------------------------------------- +<1> An empty request +<2> Setting the indices to fetch mapping for +<3> The types to be returned +<4> The fields to be returned + +==== Optional arguments +The following arguments can also optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-request-indicesOptions] +-------------------------------------------------- +<1> Setting `IndicesOptions` controls how unavailable indices are resolved and +how wildcard expressions are expanded + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-request-local] +-------------------------------------------------- +<1> The `local` flag (defaults to `false`) controls whether the field mappings need +to be looked up in the local cluster state or in the cluster state held by +the elected master node + +[[java-rest-high-get-field-mappings-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-execute] +-------------------------------------------------- + +[[java-rest-high-get-field-mapping-async]] +==== Asynchronous Execution + +The asynchronous execution of a get field mappings request requires both the +`GetFieldMappingsRequest` instance and an `ActionListener` instance to be passed to +the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-execute-async] +-------------------------------------------------- +<1> The `GetFieldMappingsRequest` to execute and the `ActionListener` to use when the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method if +the execution successfully completed or using the `onFailure` method if it +failed. + +A typical listener for `GetFieldMappingsResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument + +[[java-rest-high-get-field-mapping-response]] +==== Get Field Mappings Response + +The returned `GetFieldMappingsResponse` allows you to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-response] +-------------------------------------------------- +<1> Returning all requested fields' mappings, grouped by index and type +<2> Retrieving the mappings for a particular index and type +<3> Getting the mappings metadata for the `message` field +<4> Getting the full name of the field +<5> Getting the mapping source of the field + diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 418eb528f8e00..9ed54db817551 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -77,6 +77,7 @@ Index Management:: Mapping Management:: * <> +* <> Alias Management:: * <> @@ -98,6 +99,7 @@ include::indices/force_merge.asciidoc[] include::indices/rollover.asciidoc[] include::indices/put_mapping.asciidoc[] include::indices/get_mappings.asciidoc[] +include::indices/get_field_mappings.asciidoc[] include::indices/update_aliases.asciidoc[] include::indices/exists_alias.asciidoc[] include::indices/get_alias.asciidoc[] diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index d837c1cbd199b..81b9812d61c5f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -20,13 +20,17 @@ package org.elasticsearch.action.admin.indices.mapping.get; import org.elasticsearch.action.ActionResponse; +import
org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.Mapper; @@ -34,13 +38,45 @@ import java.io.InputStream; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; /** Response object for {@link GetFieldMappingsRequest} API */ public class GetFieldMappingsResponse extends ActionResponse implements ToXContentFragment { + private static final ParseField MAPPINGS = new ParseField("mappings"); + + private static final ObjectParser>, String> PARSER = + new ObjectParser<>(MAPPINGS.getPreferredName(), true, HashMap::new); + + static { + PARSER.declareField((p, typeMappings, index) -> { + p.nextToken(); + while (p.currentToken() == XContentParser.Token.FIELD_NAME) { + final String typeName = p.currentName(); + + if (p.nextToken() == XContentParser.Token.START_OBJECT) { + final Map typeMapping = new HashMap<>(); + typeMappings.put(typeName, typeMapping); + + while (p.nextToken() == XContentParser.Token.FIELD_NAME) { + final String fieldName = p.currentName(); + final FieldMappingMetaData fieldMappingMetaData = FieldMappingMetaData.fromXContent(p); + typeMapping.put(fieldName, fieldMappingMetaData); + } + } else { + p.skipChildren(); + } + p.nextToken(); + } + }, MAPPINGS, ObjectParser.ValueType.OBJECT); + } + private Map>> mappings = emptyMap(); GetFieldMappingsResponse(Map>> mappings) { @@ -77,7 +113,7 @@ public FieldMappingMetaData fieldMappings(String index, String type, String fiel public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { for (Map.Entry>> indexEntry : mappings.entrySet()) { builder.startObject(indexEntry.getKey()); - builder.startObject("mappings"); + builder.startObject(MAPPINGS.getPreferredName()); for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { builder.startObject(typeEntry.getKey()); for (Map.Entry fieldEntry : typeEntry.getValue().entrySet()) { @@ -93,9 +129,46 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public static GetFieldMappingsResponse fromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + + final Map>> mappings = new HashMap<>(); + if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { + while (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + final String index = parser.currentName(); + + final Map> typeMappings = PARSER.parse(parser, index); + mappings.put(index, typeMappings); + + parser.nextToken(); + } + } + + 
return new GetFieldMappingsResponse(mappings); + } + public static class FieldMappingMetaData implements ToXContentFragment { public static final FieldMappingMetaData NULL = new FieldMappingMetaData("", BytesArray.EMPTY); + private static final ParseField FULL_NAME = new ParseField("full_name"); + private static final ParseField MAPPING = new ParseField("mapping"); + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("field_mapping_meta_data", true, + a -> new FieldMappingMetaData((String)a[0], (BytesReference)a[1]) + ); + + static { + PARSER.declareField(optionalConstructorArg(), + (p, c) -> p.text(), FULL_NAME, ObjectParser.ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), + (p, c) -> { + final XContentBuilder jsonBuilder = jsonBuilder().copyCurrentStructure(p); + final BytesReference bytes = BytesReference.bytes(jsonBuilder); + return bytes; + }, MAPPING, ObjectParser.ValueType.OBJECT); + } + private String fullName; private BytesReference source; @@ -122,18 +195,41 @@ BytesReference getSource() { return source; } + public static FieldMappingMetaData fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("full_name", fullName); + builder.field(FULL_NAME.getPreferredName(), fullName); if (params.paramAsBoolean("pretty", false)) { builder.field("mapping", sourceAsMap()); } else { try (InputStream stream = source.streamInput()) { - builder.rawField("mapping", stream, XContentType.JSON); + builder.rawField(MAPPING.getPreferredName(), stream, XContentType.JSON); } } return builder; } + + @Override + public String toString() { + return "FieldMappingMetaData{fullName='" + fullName + '\'' + ", source=" + source + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof FieldMappingMetaData)) return false; + FieldMappingMetaData that = (FieldMappingMetaData) o; + return Objects.equals(fullName, that.fullName) && + Objects.equals(source, that.source); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, source); + } } @Override @@ -178,4 +274,25 @@ public void writeTo(StreamOutput out) throws IOException { } } } + + @Override + public String toString() { + return "GetFieldMappingsResponse{" + + "mappings=" + mappings + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof GetFieldMappingsResponse)) return false; + GetFieldMappingsResponse that = (GetFieldMappingsResponse) o; + return Objects.equals(mappings, that.mappings); + } + + @Override + public int hashCode() { + return Objects.hash(mappings); + } + } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 4dc396323c048..b6e785a4d05be 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -23,16 +23,22 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.test.ESTestCase; +import 
org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.function.Predicate; -public class GetFieldMappingsResponseTests extends ESTestCase { +import static org.hamcrest.CoreMatchers.equalTo; - public void testSerialization() throws IOException { +public class GetFieldMappingsResponseTests extends AbstractStreamableXContentTestCase { + + public void testManualSerialization() throws IOException { Map>> mappings = new HashMap<>(); FieldMappingMetaData fieldMappingMetaData = new FieldMappingMetaData("my field", new BytesArray("{}")); mappings.put("index", Collections.singletonMap("type", Collections.singletonMap("field", fieldMappingMetaData))); @@ -49,4 +55,92 @@ public void testSerialization() throws IOException { } } } + + public void testManualJunkedJson() throws Exception { + // in fact random fields could be evaluated as proper mapping, while proper junk in this case is arrays and values + final String json = + "{\"index1\":{\"mappings\":" + + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," + // junk here + + "\"junk1\": [\"field1\", {\"field2\":{}}]," + + "\"junk2\": [{\"field3\":{}}]," + + "\"junk3\": 42," + + "\"junk4\": \"Q\"," + + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}," + + "\"index0\":{\"mappings\":" + + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," + + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}}"; + + final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), + LoggingDeprecationHandler.INSTANCE, json.getBytes("UTF-8")); + + final GetFieldMappingsResponse response = GetFieldMappingsResponse.fromXContent(parser); + + FieldMappingMetaData fieldMappingMetaData = + new FieldMappingMetaData("my field", new BytesArray("{\"type\":\"keyword\"}")); + Map fieldMapping = new HashMap<>(); + fieldMapping.put("field0", fieldMappingMetaData); + fieldMapping.put("field1", fieldMappingMetaData); + + Map> typeMapping = new HashMap<>(); + typeMapping.put("doctype0", fieldMapping); + typeMapping.put("doctype1", fieldMapping); + + Map>> mappings = new HashMap<>(); + mappings.put("index0", typeMapping); + mappings.put("index1", typeMapping); + + final Map>> responseMappings = response.mappings(); + assertThat(responseMappings, equalTo(mappings)); + } + + @Override + protected GetFieldMappingsResponse doParseInstance(XContentParser parser) throws IOException { + return GetFieldMappingsResponse.fromXContent(parser); + } + + @Override + protected GetFieldMappingsResponse createBlankInstance() { + return new GetFieldMappingsResponse(); + } + + @Override + protected GetFieldMappingsResponse createTestInstance() { + return new GetFieldMappingsResponse(randomMapping()); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // allow 
random fields at the level of `index` and `index.mappings.doctype.field` + // otherwise random field could be evaluated as index name or type name + return s -> false == (s.matches("(?[^.]+)") + || s.matches("(?[^.]+)\\.mappings\\.(?[^.]+)\\.(?[^.]+)")); + } + + private Map>> randomMapping() { + Map>> mappings = new HashMap<>(); + + int indices = randomInt(10); + for(int i = 0; i < indices; i++) { + final Map> doctypesMappings = new HashMap<>(); + int doctypes = randomInt(10); + for(int j = 0; j < doctypes; j++) { + Map fieldMappings = new HashMap<>(); + int fields = randomInt(10); + for(int k = 0; k < fields; k++) { + final String mapping = randomBoolean() ? "{\"type\":\"string\"}" : "{\"type\":\"keyword\"}"; + FieldMappingMetaData metaData = + new FieldMappingMetaData("my field", new BytesArray(mapping)); + fieldMappings.put("field" + k, metaData); + } + doctypesMappings.put("doctype" + j, fieldMappings); + } + mappings.put("index" + i, doctypesMappings); + } + return mappings; + } } From e401710f6ea7fd43b1ddb70dd3f5a017d90a1eaf Mon Sep 17 00:00:00 2001 From: olcbean Date: Sun, 24 Jun 2018 04:14:34 +0200 Subject: [PATCH 87/92] TEST: Correct the assertion arguments order (#31540) This commit corrects the order of the assertion arguments in HLRest client. --- .../documentation/CRUDDocumentationIT.java | 44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 4193685f14bc2..b8a6b7d2d8ad2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -113,7 +113,7 @@ public void testIndex() throws Exception { .source(jsonMap); // <1> //end::index-request-map IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.CREATED); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } { //tag::index-request-xcontent @@ -129,7 +129,7 @@ public void testIndex() throws Exception { .source(builder); // <1> //end::index-request-xcontent IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); } { //tag::index-request-shortcut @@ -139,7 +139,7 @@ public void testIndex() throws Exception { "message", "trying out Elasticsearch"); // <1> //end::index-request-shortcut IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); } { //tag::index-request-string @@ -158,7 +158,7 @@ public void testIndex() throws Exception { // tag::index-execute IndexResponse indexResponse = client.index(request, RequestOptions.DEFAULT); // end::index-execute - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); // tag::index-response String index = indexResponse.getIndex(); @@ -269,7 +269,7 @@ public void testUpdate() throws Exception { { IndexRequest indexRequest = new 
IndexRequest("posts", "doc", "1").source("field", 0); IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); Request request = new Request("POST", "/_scripts/increment-field"); request.setJsonEntity(Strings.toString(JsonXContent.contentBuilder() @@ -280,7 +280,7 @@ public void testUpdate() throws Exception { .endObject() .endObject())); Response response = client().performRequest(request); - assertEquals(response.getStatusLine().getStatusCode(), RestStatus.OK.getStatus()); + assertEquals(RestStatus.OK.getStatus(), response.getStatusLine().getStatusCode()); } { //tag::update-request @@ -298,7 +298,7 @@ public void testUpdate() throws Exception { request.script(inline); // <3> //end::update-request-with-inline-script UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertEquals(4, updateResponse.getGetResult().getSource().get("field")); request = new UpdateRequest("posts", "doc", "1").fetchSource(true); @@ -308,7 +308,7 @@ public void testUpdate() throws Exception { request.script(stored); // <2> //end::update-request-with-stored-script updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertEquals(8, updateResponse.getGetResult().getSource().get("field")); } { @@ -320,7 +320,7 @@ public void testUpdate() throws Exception { .doc(jsonMap); // <1> //end::update-request-with-doc-as-map UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); } { //tag::update-request-with-doc-as-xcontent @@ -335,7 +335,7 @@ public void testUpdate() throws Exception { .doc(builder); // <1> //end::update-request-with-doc-as-xcontent UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); } { //tag::update-request-shortcut @@ -344,7 +344,7 @@ public void testUpdate() throws Exception { "reason", "daily update"); // <1> //end::update-request-shortcut UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); } { //tag::update-request-with-doc-as-string @@ -359,7 +359,7 @@ public void testUpdate() throws Exception { // tag::update-execute UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); // end::update-execute - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); // tag::update-response String index = updateResponse.getIndex(); @@ -434,7 +434,7 @@ public void testUpdate() throws Exception { request.fetchSource(true); // <1> //end::update-request-no-source UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), 
DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertNotNull(updateResponse.getGetResult()); assertEquals(3, updateResponse.getGetResult().sourceAsMap().size()); } @@ -446,7 +446,7 @@ public void testUpdate() throws Exception { request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1> //end::update-request-source-include UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); Map sourceAsMap = updateResponse.getGetResult().sourceAsMap(); assertEquals(2, sourceAsMap.size()); assertEquals("source includes", sourceAsMap.get("reason")); @@ -460,7 +460,7 @@ public void testUpdate() throws Exception { request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1> //end::update-request-source-exclude UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); Map sourceAsMap = updateResponse.getGetResult().sourceAsMap(); assertEquals(2, sourceAsMap.size()); assertEquals("source excludes", sourceAsMap.get("reason")); @@ -538,7 +538,7 @@ public void testDelete() throws Exception { { IndexRequest indexRequest = new IndexRequest("posts", "doc", "1").source("field", "value"); IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); } { @@ -552,7 +552,7 @@ public void testDelete() throws Exception { // tag::delete-execute DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT); // end::delete-execute - assertSame(deleteResponse.getResult(), DocWriteResponse.Result.DELETED); + assertSame(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); // tag::delete-response String index = deleteResponse.getIndex(); @@ -605,7 +605,7 @@ public void testDelete() throws Exception { { IndexResponse indexResponse = client.index(new IndexRequest("posts", "doc", "1").source("field", "value") , RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); // tag::delete-conflict try { @@ -621,7 +621,7 @@ public void testDelete() throws Exception { { IndexResponse indexResponse = client.index(new IndexRequest("posts", "doc", "async").source("field", "value"), RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); DeleteRequest request = new DeleteRequest("posts", "doc", "async"); @@ -666,7 +666,7 @@ public void testBulk() throws Exception { // tag::bulk-execute BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT); // end::bulk-execute - assertSame(bulkResponse.status(), RestStatus.OK); + assertSame(RestStatus.OK, bulkResponse.status()); assertFalse(bulkResponse.hasFailures()); } { @@ -679,7 +679,7 @@ public void testBulk() throws Exception { .source(XContentType.JSON,"field", "baz")); // end::bulk-request-with-mixed-operations BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT); - assertSame(bulkResponse.status(), RestStatus.OK); + assertSame(RestStatus.OK, bulkResponse.status()); 
assertFalse(bulkResponse.hasFailures()); // tag::bulk-response @@ -778,7 +778,7 @@ public void testGet() throws Exception { "postDate", new Date(), "message", "trying out Elasticsearch"); IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.CREATED); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } { //tag::get-request From 9efb0fe9bafef8c681e39cc143e864be61949f8f Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Sun, 24 Jun 2018 11:08:45 -0700 Subject: [PATCH 88/92] Ingest Attachment: Upgrade Tika to 1.18 (#31252) Fixes ES from hanging when a bad zip file is loaded through Tika. --- plugins/ingest-attachment/build.gradle | 18 +++++++++++------- .../licenses/commons-compress-1.14.jar.sha1 | 1 - .../licenses/commons-compress-1.16.1.jar.sha1 | 1 + .../licenses/commons-io-2.5.jar.sha1 | 1 - .../licenses/commons-io-2.6.jar.sha1 | 1 + .../licenses/fontbox-2.0.8.jar.sha1 | 1 - .../licenses/fontbox-2.0.9.jar.sha1 | 1 + .../licenses/pdfbox-2.0.8.jar.sha1 | 1 - .../licenses/pdfbox-2.0.9.jar.sha1 | 1 + .../licenses/tika-core-1.17.jar.sha1 | 1 - .../licenses/tika-core-1.18.jar.sha1 | 1 + .../licenses/tika-parsers-1.17.jar.sha1 | 1 - .../licenses/tika-parsers-1.18.jar.sha1 | 1 + .../licenses/xz-1.6.jar.sha1 | 1 - .../licenses/xz-1.8.jar.sha1 | 1 + .../ingest/attachment/TikaImpl.java | 1 + .../plugin-metadata/plugin-security.policy | 2 ++ .../attachment/AttachmentProcessorTests.java | 6 ++++++ .../attachment/test/sample-files/bad_tika.zip | Bin 0 -> 416772 bytes 19 files changed, 27 insertions(+), 14 deletions(-) delete mode 100644 plugins/ingest-attachment/licenses/commons-compress-1.14.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/commons-io-2.5.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/commons-io-2.6.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/fontbox-2.0.8.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/pdfbox-2.0.8.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/tika-core-1.17.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/tika-parsers-1.17.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/xz-1.6.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/xz-1.8.jar.sha1 create mode 100644 plugins/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/bad_tika.zip diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index 3bca078bd59c4..f000fdfeef5e0 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -23,8 +23,8 @@ esplugin { } versions << [ - 'tika': '1.17', - 'pdfbox': '2.0.8', + 'tika': '1.18', + 'pdfbox': '2.0.9', 'bouncycastle': '1.55', 'poi': '3.17', 'mime4j': '0.8.1' @@ -33,9 +33,10 @@ versions << [ dependencies { // mandatory for tika compile "org.apache.tika:tika-core:${versions.tika}" + // build against Jackson 2.9.5, but still works on our current version compile "org.apache.tika:tika-parsers:${versions.tika}" - compile 
'org.tukaani:xz:1.6' - compile 'commons-io:commons-io:2.5' + compile 'org.tukaani:xz:1.8' + compile 'commons-io:commons-io:2.6' compile "org.slf4j:slf4j-api:${versions.slf4j}" // character set detection @@ -62,7 +63,7 @@ dependencies { // MS Office compile "org.apache.poi:poi-scratchpad:${versions.poi}" // Apple iWork - compile 'org.apache.commons:commons-compress:1.14' + compile 'org.apache.commons:commons-compress:1.16.1' // Outlook documents compile "org.apache.james:apache-mime4j-core:${versions.mime4j}" compile "org.apache.james:apache-mime4j-dom:${versions.mime4j}" @@ -118,6 +119,10 @@ thirdPartyAudit.excludes = [ 'com.drew.metadata.jpeg.JpegDirectory', 'com.github.junrar.Archive', 'com.github.junrar.rarfile.FileHeader', + 'com.github.luben.zstd.ZstdInputStream', + 'com.github.luben.zstd.ZstdOutputStream', + 'com.github.openjson.JSONArray', + 'com.github.openjson.JSONObject', 'com.google.common.reflect.TypeToken', 'com.google.gson.Gson', 'com.googlecode.mp4parser.DataSource', @@ -531,6 +536,7 @@ thirdPartyAudit.excludes = [ 'org.apache.commons.exec.PumpStreamHandler', 'org.apache.commons.exec.environment.EnvironmentUtils', 'org.apache.commons.lang.StringUtils', + 'org.apache.commons.lang.SystemUtils', 'org.apache.ctakes.typesystem.type.refsem.UmlsConcept', 'org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation', 'org.apache.cxf.jaxrs.client.WebClient', @@ -635,8 +641,6 @@ thirdPartyAudit.excludes = [ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SignatureTimeStampList', 'org.etsi.uri.x01903.v14.ValidationDataType$Factory', 'org.etsi.uri.x01903.v14.ValidationDataType', - 'org.json.JSONArray', - 'org.json.JSONObject', 'org.json.simple.JSONArray', 'org.json.simple.JSONObject', 'org.json.simple.parser.JSONParser', diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.14.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.14.jar.sha1 deleted file mode 100644 index a93cac2243e69..0000000000000 --- a/plugins/ingest-attachment/licenses/commons-compress-1.14.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b18320d668ab080758bf5383d6d8fcf750babce \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 new file mode 100644 index 0000000000000..93be07c90a41c --- /dev/null +++ b/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 @@ -0,0 +1 @@ +7b5cdabadb4cf12f5ee0f801399e70635583193f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-io-2.5.jar.sha1 b/plugins/ingest-attachment/licenses/commons-io-2.5.jar.sha1 deleted file mode 100644 index b7f1d93e89702..0000000000000 --- a/plugins/ingest-attachment/licenses/commons-io-2.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2852e6e05fbb95076fc091f6d1780f1f8fe35e0f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-io-2.6.jar.sha1 b/plugins/ingest-attachment/licenses/commons-io-2.6.jar.sha1 new file mode 100644 index 0000000000000..75f7934c08267 --- /dev/null +++ b/plugins/ingest-attachment/licenses/commons-io-2.6.jar.sha1 @@ -0,0 +1 @@ +815893df5f31da2ece4040fe0a12fd44b577afaf \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/fontbox-2.0.8.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.8.jar.sha1 deleted file mode 100644 index f8abddbc755eb..0000000000000 --- a/plugins/ingest-attachment/licenses/fontbox-2.0.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-52f852fcfc7481d45efdffd224eb78b85981b17b \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 new file mode 100644 index 0000000000000..4ded3b5488825 --- /dev/null +++ b/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 @@ -0,0 +1 @@ +f961f17ebdbc307e9055e3cf7c0e207f0895ae55 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.8.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.8.jar.sha1 deleted file mode 100644 index 1c346871e2119..0000000000000 --- a/plugins/ingest-attachment/licenses/pdfbox-2.0.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -17bdf273d66f3afe41eedb9d3ab6a7b819c44a0c \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 new file mode 100644 index 0000000000000..9bf91e07976c2 --- /dev/null +++ b/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 @@ -0,0 +1 @@ +d0425578218624388f2ec84a0b3a11efd55df0f5 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.17.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.17.jar.sha1 deleted file mode 100644 index 571314b3378da..0000000000000 --- a/plugins/ingest-attachment/licenses/tika-core-1.17.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b450102c2aee98107474d2f92661d947b9cef183 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 new file mode 100644 index 0000000000000..ef162f03439cc --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 @@ -0,0 +1 @@ +69556697de96cf0b22df846e970dafd29866eee0 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.17.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.17.jar.sha1 deleted file mode 100644 index c4487e4970f25..0000000000000 --- a/plugins/ingest-attachment/licenses/tika-parsers-1.17.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4277c54fcaed542fbc8a0001fdb4c23baccc0132 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 new file mode 100644 index 0000000000000..6441e8b64e7b7 --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 @@ -0,0 +1 @@ +7d9b6dea91d783165f3313d320d3aaaa9a4dfc13 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/xz-1.6.jar.sha1 b/plugins/ingest-attachment/licenses/xz-1.6.jar.sha1 deleted file mode 100644 index d91cd44c0b4d3..0000000000000 --- a/plugins/ingest-attachment/licenses/xz-1.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -05b6f921f1810bdf90e25471968f741f87168b64 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/xz-1.8.jar.sha1 b/plugins/ingest-attachment/licenses/xz-1.8.jar.sha1 new file mode 100644 index 0000000000000..7455feac7983b --- /dev/null +++ b/plugins/ingest-attachment/licenses/xz-1.8.jar.sha1 @@ -0,0 +1 @@ +c4f7d054303948eb6a4066194253886c8af07128 \ No newline at end of file diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java index 97ca1c0b19774..6606d1bc72727 100644 --- a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java +++ 
b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java @@ -159,6 +159,7 @@ static PermissionCollection getRestrictedPermissions() { perms.add(new SecurityPermission("putProviderProperty.BC")); perms.add(new SecurityPermission("insertProvider")); perms.add(new ReflectPermission("suppressAccessChecks")); + perms.add(new RuntimePermission("accessClassInPackage.sun.java2d.cmm.kcms")); // xmlbeans, use by POI, needs to get the context classloader perms.add(new RuntimePermission("getClassLoader")); // ZipFile needs accessDeclaredMembers on JDK 10; cf. https://bugs.openjdk.java.net/browse/JDK-8187485 diff --git a/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy b/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy index 0cd359a99731b..bcc5eef3193d7 100644 --- a/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy @@ -31,4 +31,6 @@ grant { permission java.lang.RuntimePermission "getClassLoader"; // ZipFile needs accessDeclaredMembers on Java 10 permission java.lang.RuntimePermission "accessDeclaredMembers"; + // PDFBox checks for the existence of this class + permission java.lang.RuntimePermission "accessClassInPackage.sun.java2d.cmm.kcms"; }; diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 598d3f4e8175c..654bc361f53ad 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -214,6 +214,12 @@ public void testAsciidocDocument() throws Exception { assertThat(attachmentData.get("content_type").toString(), containsString("text/plain")); } + // See (https://issues.apache.org/jira/browse/COMPRESS-432) for information + // about the issue that causes a zip file to hang in Tika versions prior to 1.18. 
+    public void testZipFileDoesNotHang() {
+        expectThrows(Exception.class, () -> parseDocument("bad_tika.zip", processor));
+    }
+
     public void testParseAsBytesArray() throws Exception {
         String path = "/org/elasticsearch/ingest/attachment/test/sample-files/text-in-english.txt";
         byte[] bytes;
diff --git a/plugins/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/bad_tika.zip b/plugins/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/bad_tika.zip
new file mode 100644
index 0000000000000000000000000000000000000000..58ebd8411edcea1a51091369feeddae52719f1d1
GIT binary patch
literal 416772
[416,772 bytes of base85-encoded binary data for the bad_tika.zip test fixture omitted]
z9C0rJ{wKdXUf@?m-tG}`4D&;Mb|DSnc0hhD)ux{g>~)8H>MzB5Tm7pKx`1z#GnTya zF$VHaiv0Ylu%UnV0Ay|weyFK^$x#fBODFiRiLj`oP6x03Q?-I1_%j}@pcOHAUJ;+E7IPf16?hTr>G@P0 z5*_{7z#5t^3bq^RoQ8XKv?2rVO3G)U{Rb{PfD8HAWq+p1nLO4LFIV?~Uktycxc9m| z9YMtFY(LajCB#vfq?7Gz_)N9_TPcryu4bXyI4t8lKp)(+qcI6#HGif?C%ajOMqvU! zSNILNvjmjN{BU7|zS|yY6^?svLw^bKYU1ai{U7X%{ak%OrJVha6n<%!Y9_+X!o5y@ z-!p*Epix5DR`9v{Ot%kdTLMfgt(sU*ET?BJg(dPo_3%CL+Y0_uBHd^=Z6%(e_j5JU zWN%Y?ydsYAoQAeG^@;CZ4vR*GvJkJ3Hnl^=2|Y2uPYeCjt4U{_`#>eAeO+zpP{3q7 z3pem_Vdr8_pCs`2Q7e}H(rTRsq-SNDs(p#>pflLHs zszv&oBjoA52uJ#qexY{87#Gz?T}bmx@WW~K4cmXSh@-LNq3sK`a^g#A9wKZ#$eQT}@5 zlPuupgwB&fCzbihxL*J|O!*e_<-k{P-p&QK;U%wS+i1mQ72{1JO$aB!ZxikX8$H-B z{WI=fVEQ;mD*n4bk;48{!XBDwu0me5eHE(BhixHWsj%qt^6`xOgnWXCJFT3Jg6?=%n&F8#49aZx-=2cEq-P zrDi?~ke3?2QmeFTI9xiMG6}d0v`4(-=g`4mupy!yEo=^}^DFhSrC3V8b~UqJh5Xb0 zm6}oRVVyK;>;cYfS0h=`?TT;A3OL&RZGg0Pbx2YMbjG!-(ZD>!B?b6RS+0tBT~8~} zp+CD_z2n`GuV_~*aVn7aN`WbBMAN$kd?wPTfo+geYBh4FGTD#!vAbOznq^#c0977% zb?s_bTrp&#z7Ew#D`LOq_BQ9kK)4M%6M=Jt&Gd}IS>DKRsc_RsaR&3>67i*vYvwYa zR@L|regWL)kxfYRWZ*4AKb`KA41N>%Q-u9ALs`XoOs9Rwu!YKb+#ApDg6*47y?@dA zwZC0wTn%!EyEkR?ct4DJkEmkKB|%Px?1%R^GU% z{aMU<-j=5b$W=QcUQ~Of>t9u4{Z^gBZ(RFCSpTBYF36`r=09>rh0sawW;TaKWlJ7oi-_aT#!MCN)e1c; zxLmFN)<1fv+jQE;ZflXwg^+0!w&|N1(_|59@@2)NfzD2f^LAh)vwV4|r zUjlax+y%^^NUKJ`uj){%uWFf3)6Pwd>D_FGOb6Vz3jMht2l%ZWst?-+Z2K-${|*9O zA>XMsk8yoRBQkiVG2mw)UcIdUHlZ_5;6dV9&?+(KcS`;zYGWg;B)F&3EIY6Zc(+Jr zI&Zv;91XVLzh8XfNZ&fmv$Lw^h0 zm%!bHcr^mkX&i3+_k5?ySmT;WqnY2cZKsuc(afh+O+C;V@x9v7)r-6o_q|%NTM2$4 z@F9NgdBCSQfK1>!3w}$#S095+WBo6Qv?Bd=%%@Yt+8FN_dHyd#CyhcjBM(SQ>&!hu)2*WS4CCQTb6>@!wScyRv!txo1}^wk%z&*f96oOqE>%Y3SD z=VqDBH|pkTQM3S-dHpMg;ih!0V?O!IfDRwrba-z(>}&I>1A7Y)-)`Wo>u<{1xQ;9o zV+4oP2cj!+-wxa);z@mgO0-vE4*Am^BJPdg zZ_eJhF>CV{Z6%d6a7X-z{71VYe&lfpVf$tLZ^gZ((47b!jvv+Nc|G3uXy7CfzP<_& z>0R)nI;ka>b^b~x6ELP#CL!=!@}pWMLsu4XDC1V(8A#Z2ln+%us+B;-MKc_BYX5_O zHSB4J{k1=;8PiT+?~iI%Q!(1uw!>;yeJ_U_<5#24I@=E`{_j*rpxo$!+?^usG!7Go z_GkKGRVViQ6=jQY;zc}Ux@c7*Y;XeS3q1t_8z<-#9}ewb`BVu!Q^b+R(41(ay1}QN zd>*!QCbe%FQ~xm)`LyM*>Oa;W_P28y8Po3hcE)s2Xa|iwz8!Uo;%#Hh`8TJ{Z^K#qI?aVDV?hS(v0^x4VcDxlEBXfo+aW) z?Q>Tq>gP_Cwjq$I1Agzc65U+AIy z9Jo46>xRtVXm*Wdwu|RNyZMvw?AjpnZw@OJa?}oqWqJB&Kr&-019Ht_S|Z#D^zI@J zGkyx~{}`bzBQRab^N{Bu6HjCBNT2ebRDUNLVHN=2CG4To@^U$>U|LytWtirL`vDqD zXZ}5`zXR^)sl5R`T|cQhy$0`B(@*}7cDDjIi!`Af|5f1Ge)jMF4Eb4o;Jl35Lcp)e z9ectyYHxSo-E{q|4h~QH**~7S>Wkri zhw?o0scdWP)k1$6n{agOvK~V_$oh-IvbuVel7A~mn%|2EuvfO@tpLnGJCjE&K# zz^+hj4b2>YAKs;Mj04#Bi@*2P{fp|!>sOaL1FagoHZ#RE8474*unW zzf(L%Y7N_AL;SD)bPM@a%|Mi+1aSg?DBh4Fxo%PJI|ilys^U&9%QD8s=OHTLrgNWC zp|ki`^;z0_mU)p{`HZV+{*&@m(0tEO;I8_A-p6#O_5 zFZz6gkM$Tm$!56eb0iJS|DD$FGrmXILwj|*nQ!#;!ha9d){&myd30N@^@crB%>PK( z>=bsE(wbtH`Kt))3Sl!nqY{=01{E;Q;&jgXU7fj_%Y6FONIBzqA}kt-sA4{?r*8e- z-^29&?oa1h#`A^WG1PJbzw39kpE{)5KZ;>ve5GH!e{D-V?ma_u;7GTaZgozDo#pA+ zb|>SH<&oe5=TZ3pzpiez<|Po}rvpy`_ORatGLF>Cf_x?JwaO#ng$#Y^trvRgAydG- znr>D8w6jj?eYudwTj1U!o~e^+V)*Up_KzBO0xuN$y_Amd8}Ub|zBe=a52bSwsDkq2 zAF5|m27kf8Gy|8(GWyy{#x#Ro44J$?)K|@d@h+$Rp+42#3H{l?ZwlLJtw9pxD@dMN z5|F6_E)n^P)|xrMEB!-#`nv<=S=%4#(?X5#+YNlLT&p7Ve=5@RGr0$u(wEBNBcXb0 z)wmkuekO$LhW2k`M^qfCWtKv91l;!tzjTgq2hus~h`&5Z1is2IqOT8)1+VIe zI={lW=yN%_tiSY6Mo+v)Li<0rPM6X0XgZ==15x;I1>G-^4t}w`@j$G3DxtkaV-{UPUk&(sLp}R-b24pevH2g3BQ5B*9)Cd#wh18b+%RV zG5;u7C!R^}F;(sv7tL0-fM0dYe>Oq^@IGPld91VKn94K8MQ5kCvVI4si!q($)r{xj zh5KCg+X1{pK5H6rWBD8Sy@)v;s_)IM1>bgDeeyaH*nV8~?h=vTLXN8u*g`7LftL!M zb`C2Y{6;zY2FijSC-~2^eDQI0&b)D@3q7>jqwKh{jm}4EM4ogXS6?};XFg>^H~8&v z(`TVtm`@|fd3Y{0aA!eJBlBsNt{UyYp5tmHryA)Kd_t`_@qia`LiHXyS)N7*tS1!y zO5}MOrAZ>&PpC4y5;z5TzlblD2_EzS<4&mdY8~2CrNH#{%4XP>cS4;@PgfyuH?8I5 
zu;{#wCZtt1+`FhOV*WCbK4l_*Qk%byWvCpefc|Q@`#)3FtM;)qgXcS;R#C)JzU@`E zwXxs+_WucgeHsp#b`f_f=i8B2W5Hi8?0HDovqJFihiz%lm)h$u^D}_yovMePEMWQ? zVnMI6fxdB2#hBWA1t_m-;if*9m-#fC8;>+EhdV=r)dgIRbnEI>`NX&$7PhSv^0W`H ziFK}^w$Mq1zZUU*M?BMw#ybMJ3pk(KPpXkX7yEsj%Lg~y$+V~8q`$sMXFOfl_KgVZ z@4`;weDzBBStV><3H>#!ht53jWK6Tg?To(_I?IJlIycD%nHJFf9G8ewp?aUxhx`x? zoWb_@K&C_Jp>qJ*KqbFFJm6U&W7wYpOlJ!9A`LT7skUnh^f-WNy>r?rZhNzQ9NafS zz6A1lz~2e|^hup0=+6fKU6JPh5cbn|2bx*u$26MH_z@wqjO}kYrM^*;i?FJJn}p8Q zB0c{J>OSR9Pvd%2wi!UBkV)(d)n_Y{`;`1k)GuRfoU0twr#>q~S1fQ6>&WU;=dpz2 zzXY_K<#YSgtZ*=;J@5g!FN*pdu)hlY7g@d*_h_GmM+Ri#@L^2*Y9c0X& zx`F=#zZIuwt$dh1%i>{q>WB8A9B{!+vpC*9nz@5t@r>wODvc-Uyv8tXHH|2--}i-W zw1=Y@{2tId9BvCR?ZYmH-*({LG_ncY2s~5RZ_M}xo5Hj*s&^n04NRZdE9j&Cw~B9^ zNrg*wZbvVjWfG>B^YJICT_2{OukJfVUwIDGN6nIZX(dCL-u^ePH6mUWawM1btg!yI zY<~sZ!T3+4_XXHk^;igOw1s>o6_;lAOY6#`QQo$}y;g+vGL0Rv%wr&anSvpi>>^@M}JPUzI_S&MQGd}{MV;Xf8cpL|L+tFVlHsEKfso-F47Ao4J+crIuD z2O`{m{>iLIE#$5dHXG;ORDu7Y@OvSx#|N(!L}v|U;J+7Sob_k3gdsf?4gscg=wcg= z(DP+Xs~=)492XIH-)VcDjm!AjaXkF27xFZcp3ZuVa~(Z!8#Bknz!XO}%g`q~n_xpF z+>eWNm?OeEBy89q+XnDk0>3TrYqaaE)-W}i4_*%_Md&fk9&88ypj`P!GY8;B!!Mn? z5NB1kRSJ9db6g^<>bZ16J`?yyxpIhR4B0Q8(NYGPJh*AqY&P?0Urs6G^;E|}rjWuF z@uEFFbu9A-C>Xc`L}iEr|2`0%)!B`F-fdOyMH2Kh182!+aoXy3FVZS}kSbg1DgA+| zFCREah4pVxEaUBCht2;iNR7(R|2tQr`sOX8_+QY@pRenXt12d^(??fOU2X z+l(2`T-|`(8#yN_%fH1vH z7z6nbV47hn$9+5SO~Phs`;}8Y1AY$8%mpZ&ABnjC^d~EuV<1N(@`3Q{0;YG$!+tjk z{ZEMW{8^q!Nby4%SP4HLi?C?rK^b_BpkIVMoh#!8uO38a19Wm&B+~vf>H@fF4__DJ z7(7^&?L90*vpCkl3QwPrI&Hx%Hz0J|oXAjfjfS(lcyNKI&Rp5UP8*^C?iKHJu8v$-(_e23Ootsq+J>|f# zLm&2d)o2rdw~2NzAn$enw+dV4`l0p)jS(ZBUdYgxV}wnGNh^Tt@N0#e&LSv=Of)c^ z_Z`eKRdVbJ6vzB>YTq-avj{U8XVYq2`)tcvXY{Z($of?>4rf>re7x_>i#vHj@u>pPsf?v77zI7|SCd z|CG>Ap8YMt+QQ{&EM#sLHq&Q!bB8GXuTh=E_&Sj$#^)^yaPMgmE{z>G zu{@32dKrH%Y@l<2%9&4fs%@ykH_?nA^N&&8#<*Ip$^q_W{{760hyP8&e&g(HJNVQ; ztb%+Ja1YxY18mGXdVnkN?`9jQtVw75j0o2$;=WmgMg4&qmZw!_i3qEV?4dH6`IO92 z;1|PvGri}`|2xgfg5M5zF0~z*PkYlE@a%eqs(j_de+y`yu;Dkk5|HhQ7#60V3*QQz zalmaN&5g4xvcabr?@H)P8y41|-%@~|74fBK?HH!)`CWvyl~xVIZz1k&hR$l1p?4>b zF^xM^Fg|Xq_=B6q3rb*H1F+Gr^8tS$?3^#+_zzln8mRPaW&5Mxc8m0XPS|X;X}Y14 z_QWN!-y9*|Eyo1eZ!F|!3?-H2w+MMs83=wkD2L|%ScbkmS-|)v8k<9yC2-TcT@~|< z&&syKO=WNs^B0IX{+;F<;J21+pm*U6wwcDGS^f`UvoZJ3g?shDwMfs9GgKWJ3_VG} zM}$0`lamYC81QNDc`D?cz_&oZox|EH;%l@K({b;p&_kd1u0x*mfS*f!81_rwtMEd9 zBiucwoi*VGuM9-r)a*goSPx7``9%O%0zXVEGEm-HhllCk)-o>r`$nvv%Hd|js|D^? 
zsJw;W9$?z377xF{@Jr{hL=5Nh9QIft9|t#`Qede4aU6)io~o%>$-$cq=$8TCo-nejVJ@-YrC*st~y5w03Y7(!3gcIbYGJ`IUv)J2Cs(ivxn1t%;L@uxSe3ptEMv?Fw7`A4NFl0i>RILv!8bmD z-idoBgmtyT>PP7T9%=`Y97e6o~2qJO=k@(-8Xrq&JJ8|3#(X%Z2_H89(64N)ac z2-yz&3~cCzJ<-4&r=2Wak6vLM_!eHA>Hsz~E(NAug&VeI1A9cCrw@zgakxvwv!Ir2 z1@rsAL=1N$y$j60ltw0zC#&H$PC2dv-X`=f6*}ugexNUcb+aCmNQcL$rviRrXqdiN zCn}VCEi@|2*evAfYud5Qr{XGuaXF}f@pmGv{sQ@|P=7Bw*U(8V7sly+@%}I4(Dm}4 zjC6V7=M}-v7xH&sNZXv1PK&Ua)^s*8pSpKg-+^O*>13A>mU&f# z^}UGKGC$O|Q7@C_>xH~=@=!YN6$m@&(Ro;YF{qfaRoHyFG1Gy2uL+%tKrJkPKFA5& z2r@=6+JR}kcsGYWvHiH16wN2R;^mPnt|VxN`6MMzzz6MqB0!#P1xLtxMTo(XyhC?3;0a3i&~$+w0E&2 zT-kFO?G=GcRd`tco&vgzlZtA=|53z+);&j~RZA zD z1&$8WPo?eVunL6@WM?t_hC}9G6vxp@=3EXpV{}-5`5X&82w|l|J{5R}(6dL_X^g(+ zLWbIPO~`9mz_&2J9C(O$Ui77slF|O2Jzd_>{xYpvI94QjPo7pfp=1VV40sq-d-hi8mAWZKrT?&_ST=& zq1JQ4^plT`>r4(S8vNBl=Px2G8pVt|N7--#a5m$ibV3_r>a%1rZV=(_5@FFvMRhE5 z1?@>0itLaMMw;0%1_3Z|xF99{b&GU!#pxde(|`*e}8=5pjG+u706WWcK?c;+O}0;c!z~ z6vg}?k++Sw=i=VILgzXW?p4(19qS*t%;?p^#(uHj==sZ=nA;>QSTB6uCp7-m7&AFn zS<}mYY3-N&Tpp{G^Gn9O^|>lHI9Y~H$%#6p`L0L_+@aH z3VY5HsbGxd7DHx(pRFAc_8bs#oR6>?S--LFv;JJQwxgZ-6S>6cf%_hshdNL3?-w>a zMSDwFeuS{+s0g=XU z4iRDXh_I-)R*L%#pbQ!{VEyljxKoW%4n1|a_ct2PWEt8Ez`X zpeG~u8bK!b0wsSIony(E&b!WJe3OXFeKcYPJ!!aiA!NFs-*JI@$5U9I_DFajQwaAS z;Bd&40@J>oGVrs3n`oqlyoF3410phiM7kw5EzAK-Lr1E&2#>5ypvrrm+j z_-_Vn6n3`JS~183UWjslTE?)!2bn}zA^`7LZD}q$dez6d_wEk$|7jqK1@3(_U_u*co(D^X)o56p8MmgCpwFm6bAAWII|CzdU*@a&! zPqd4b{B#=GWWSHfkyasJMr(ACcVgk^6JgsU;HSWED){FL`TNN>$QOW5CuNp!Sagy~ z7UO+Xx595Z+~*0uwD!Fb_P2t6HEi>;ywO6cg`3X9PsD!{-IL!}gKZrwPrcA6;E+pH zofwCA-3C03_Nsy33o=gAw1Zztbt~+N2X@dLF|ZSOB<{!JT~4}0y~|bb7Xv(%Mt&jV z2Cfk4M)QL{yi*?VX@6iY;HuM}Xju!l~7vqGin10fgaXrfKPB{3q2QPM<@_RA+je`4gp@-_1 zRFZ zjj*zCKYhG^#3u&0!kEDVE~mA{&>s){FOjx1D^|=p&lmPw1pRgN+{TCXpDyeIrg`}m zmXD=bG{`i!IH|M}{0oFm+V5e7d?)yiQSX%XSfR&yslpcudD`#ogiJe#PGoej z42^_#;T?&9oAz?FL1zju&2~hy%o8GxgT%A`LZsV4(#7&m3VG_0)*#G0+@m!)r7T11 z1%rXBLI0$6sLZcszZGy(`?iVl%cW`*(zwPU+-8>lQrHkJ;#(zbh+&!V*s%VSYXgC4 zA7E6h(sP8?;V`~P==rzM6Ch7V6ZRxgxypVo7JlhO$pYrnx}b8#Gy+@8nATgDFs5%m zbt7I?a91Ktyv+Yf#F6&HG%=sbja=YX&@;xKG~k)Qw#$?Z?H!E;&ki^3Pl#bYm31ME zAE%i%$n=691NOlN7w%D;stq^^nA#PoxStN(F6+s^nFzqnpj8yD@T^+10;H&une8<*#g_^;2tN!rT4dn(rQAO_Kk?kuaNJ)-2bk*Arm-3 z&5$Kd;QCAGxlN?WZwNP*W$0AkLgayDxaph9ali!=RN2)A{kap=Xka#QD)6O$GUAg3 zKCQEggG?20o(Stl=u82>9DG`1R)%L<0!(|1YuN_cLlTT~scV8NmwK81CG6}%p6q~| zzExL=I5q*_F7)dou&DoSabfy?$ZXhe2c~Bf632ZUgyn?am^d{$7SDVtb6TO#4fiE9 zgNU$PacWez3_6{0s*RM6Ixz{@*i)Sje21{%fQinZ2-C{Ir!zz{ptA;e*lDNf)SxX? 
z3H~Co0XiGw)XYbHoWI-%hu;>s>06D3A@~P&aGr0EQ+=Ym-$J#BD#%H z!S5QVc@*9S$UkYUV8A_kr%EC3xI*c%LMH7BwbtC(5URxiKSBLd)-#^^l8mX&@=zMU zP5a~{peGxc;#E2v&m6c~yuX1W?(Lw86QNoIWawngX4XmdyBG1ThWiX5A1m@0^(C|R zhH5^@TqgL>h_L=5p2hRDmi0=mr|4Z^e7W#T@$H@xszu=*?TbrgnPg6@r4^Wb0RF7@d}50J>+bwPE_dl?wf$JG=$~m0nG~ju`$4fr63+cD9< zPk}C@4st?xABE1npm^{d;M0Ec4!n192`Vn>@D~hB`_H2jxa^{_AjXu=c?te{ zvk87P;l7UJm;ziYp7nF!SD-Aa0RQlx%qZ6-sPeE3;g%+-J!ZXlR)xSuo1`0<_O2JB zerW*y$Y>)0)4u&=$oqg_6yegoweUn`XDIT29?~!hZrWp?i8erBqRLm<^d12}&FLAN zsOrHK@RNbVL_Y5n_S5WLCDJehGP^~%)VB11Uk*N<*Ee2es<)m<*uZ`3bka2sFAD0ZmT zf$`Ak22O;YN|vW*?O{wSFWZnljc_MGriJ-5U);`kmk8I`uU_w<`4T#bllyB_{)5*; z;R^ffMZWz_#D(%?D$*kk{$DW0FoEY8tJZ+;@r&zu!@*(E++`tSI#;5U@$=A=&G>gY zmknyhbIhEkR!>&249%l8FgCtNRtg z7WTX$^ibU$HC@>-nPp<(rq$2!^xS~|5PrSF?>WLw`bt9^!pWMh`p6|n+k)w8POysg z)2Z!gl%J=o`QLKp9})WhLvsYsQ;T~?WjmJI49H8Z(^Was#D1y0R|7kH;ihv8JD5L4 z*yaHxK_=#E7lZ1!46g*HFSmH`-w2{H)jI0Sl1`TC1dfzvu0dxP z;@b}XG?C}&-7f(@I?2CRJOY^Nn&2d5TO%j~GCfzTawZ%&Gs$0;#wDqhMycRCfa%+a zDeRZ}l}`9egWE3mOVhjqy56yYWV7XnY0YXd~OT`l;p8gsrBE|t9;7WEPGkRKY7 zRGYjGxDMDs&vAy5`IyETzz>|E##-s}C8;wtI`Q1Y!8g9m+HJ&({T`F`GQay#xR)es zqcNN`lu^mx|4rCWUwbQHJrTyrwi#+QvvEyj{kh=N85Ulazl`sf!`;h%n`Wq0VpS|d z^DniGX)Q-0$%0-3zk~V5h5fVwtQ7Iz*k`hnZ3!a;~^d zM?e*_w>lcWXFQlqsh*{#Qu3I8QkZy3;PXx^hB6`F2BM=i>ew%3uWaCkS*lnJOji6? zD0+;k+2O$b@MN`}p04g$Y8ioXH45A(Y={yz7*%*I^w5#LanO;RteQS092PD7EMQDW zBDElS%gHT|$)Tn;%PgQJIE-JWY{&R9P%UHn;9w*4R3)oL@io9+;9A^E#D6b{j!Xyv zKja!U?P^?9+}pvoflo)Bb+FDWK)JxqYt*!BGqQfnHR^bhX2i=5Tu(*zHQbbBeerNp z6FCE~hvyo#Fgy=_UBGS7p9-Dn*QgCxY0%>arh~qVSC}D=2LULjSH{9Y%Q%ZRw>8-@^81ms7eJD1J+sVs6pPa53s%i&QPVupMn__S;#kL9WB7>jbF z4(z8OuQ@N2{Rk5EbkrYY6HYgM>j&JU?@H?mCDwQ8C<6;09%-~f3{7k77auT{-H zx(a~Hg*|lqY8C6BC*nm}*b@y|+#4)(M$kfgmcK-HJ86iM`5%gSjfTAKI>ir%OfSnA zA1{cxPHZm+ewXqm%hR!KF2-X)@r>zc@Fd1D!iIw)T%$Qs3AyRiJ-BXQbKiZPTCziz z2iPX!_!ZR=;I)J3<5ne<-oWS5l9cQG`K=cG7Px7uvi>@MH`obmb*k=@4|yRNc!>PC z7(EO4?Sb4lVLyE)JdW~_)BpZDoT>}qg?u!yKHUhJIACKZLK^TJv=jz1c}}%t(}VY` z5O}Bvm!`W)o&K)43o@B-4-+zUHwEuewNrKZy72Bd1CJMW(r|MdY^VVLQo;X*)4UD* zfB4yYwv=?kJ745yqb^H@+&heGfnTF3AEZOcJb%651zyDEZTUR4 zx`G#k>4#YKunZk|6TLv;SLx6Z%CT_M0_r&C(;!Vca(m_i|6m4P2dHBKJ!#-y1-FCc z=`&;5j28<#V?}PGlB;lmzlCaC!NO)*Ybf`%)<1^)9gF?Il7T%w$Y%gwFXTTxEv@Lvg3J(p z^9z6<7k1JomYdiHn*6MW&hpf-{?9$u0@LTft61jp(*_}oi*lQ7p^~AkL#-@N2kly+ zvkUG3wjl&^w5=}}yqJZmm26z}A%=E5-z4xWg`M;%`ZDm{;2$(vIlxOq+z)bmJ>b7Z ziy+vZHlg46tX0iI)eGuG9xGnR2QRVw1QEvu@Ee)GRD?yxO|&fZ4^*_GByESgo+c-m z|0ev_16vn`=?ib;7Wr$YV8+K`PcoI%aQ{ohmu&8Y-?T-lXXseuZ{=q*PT-zi3fyOj zd`N@PcK9t^q!!OsvpgkR8Tb`&8wYfi0@DWt@>u3cPTPh>YEmT~@;+c1;OWKls#~O5 zIUT^wi`0U|Hug*DnM}`pv1$dT0*5bFJ-1BAbT3lduS&oV1phybBd7pqdf5x4|+gh-P(dAJ(L3mdW) zhxLEzzG<;a2M_$#E>;U^Yk;eOm#~Z%m1J6`eP;Ho|@=Om(bv4fM6}YlluB%NQ+#Zn!_>517jwN*1sNd#)3;RC=IUrvPnIBkZ(-O5fs}g=gma0XU z(U7qNe@wlpr7B)D84?4TF33l6OS%_$r3i~)0rInRsXC}HiT%=mZVcX|7`VevJ6@&) zy|QBPK?m}fn-ZPUfr*lbY`x82xW1w8UkMrUfk|3>72<;Dg?@E22C1^=B( z{R`8A)5Elh!X6sQYUgli06D{Pwu zdor27N~A4KNLDj{v^>mQ$W)7PU#CStkgo$#kF=Zp8XppBPFLl>mH5lT`g^TyXi+X( zcGc1Fm&2DSAGA~=k?~oeH2BDZd%iJ54s3j`DjE13VI4h~RQ5~#&|1dyG2e2=!5|-W zmcZR1Qi7((s#u0f?heLZgSr^65wX93`lQUKmS7;T7qs@Yv4v8E6|-FRwKH(vwp=Y+ z>}9`Cb6o7;uM>XH1w}!|1^#!!PWp6G(sJc@%xNErbu<52KU>ea#mtWs@`)lh8J}!w zgdF*e$A8UoRg2ayAJ~U#hnqgQ*~0uUsIRnwbD8XKLT3nMdO+XHK0A#mu{@1`wjz!W zxQ&B(;(+&3$;~ot)B|8_7xw=kbW*RPlx18Z4d0;t3-hCxSGYnQflF62@K|cOukeo* zrvX=krpRp@G=9Z0w3WrT!e0}XA%1NuR12{ZxEolXrpiz0r`#$b>wpca4#@@Caa5O)=?Z#0=2?T zE!LQoicibm3n6cZ`#HZkdW*Ob_Jn}{CiNcR&jw5%4^3ykbR>Bc)qHSI6LFz!co~pS zTB-W8@zCP}9xL(*El2J|npc89U5=gdzX$wtXbBW*p<-Y<7Pg+lZJ>P4m|9rvsF6D1 
z{tDr?GoM;>>CoS}QXLZ0!~D0XCIPmt3e%6?_rPy3urY=Zu}ZaA3-FAhSE)m8ve1G{ z0X`(2Z!$kq2l(fTc-4qBq)&BOlfty@RcgC<=_+LdZ6kNXhQd|qpol!?KQH{=!g;I? z_pTP{Qvqr-^gw?vZ8d`1_+(Zyu$N_h!0W|(_oJ}?1re6dn3GyX+fNa<)k;4dS%xlvK{z5`MC+U ztsMNr!hXspO|1VvA}&i=z624I*B2 zpkprbb`0)a3AdAFvV=^nu%}bltbdpu@2>}b=*aV6;CkTmg$?hp{WajfDD>PY4}20j z-!WP-nf~5(XQs-lEo@to$UpR9-?%mW?r>PvHU3W`M6Xc?Zv?Z9G13qLcN)!OGXDZ$ zGc9FHVg60R&fUWPpM}j-H&wIz3}M@9+EUAWIvTx#F@4NC4Y&>THpjOa_$8s=m>;ZL zqdptrW51V!qSh)*J*@`t1J{Q2w;aQPS3+ko(yANxW(s?*=l@v9(5JnUSr3g8WkWU* zZu($!Jp85t8}CsDFpW-Cf?ohUOX#Gnn3?dK1wMU@z7#kY_`=hAqV4e81b(#O(+E}> z_};Z@Nm(u1qc3v-zY6YesI?9pvQ8b`)xk1*Y1zs;P7}zuA=8U{7YY4jz8(A|$bC)e zgZIM@OrJ_~fgb~WvFyporJRC)jo_1-D(G>;Z#ylY!aME)rX>Qo;HLxA=N7|(Yk=2K z9NEr2#_}4tH;Q~!=ZE@%j&yZG<`Vaj6z`9`lV}V;bCMwk^}8j`pZ#nJ!^-BDH6jKL=%OJam@2 z)Mp=hfD2vz@{=w)G_;xJZx(T+k(hGIkGPjfJvschfsD`jhOZCnKf0GLDp#G*9}E6$ zs@LE*3RrI=t@rn^^N?3F)~mS>YO8FFoz>sw~`h;@)hL&kwWTB=B>DZN{heJm4Fj^l<|>Qkld0Ul;aJ zxm3w~>H)Vf-bY&v8IzhiJkJ`qV})%MpdOYndiD(>k*KykBX8H4cR#uq4H6jz?Bxk83&aKdy z0Zb$M-QbsJsbdc6@hoby)Dr&!4vWgh6v$M;y_S|uGoQ8&rXl^CveY(9y1c+-f(vQY z0Zg9=UgT3XDnah2y65wvO zzhk4Sr&>8&>f@ACT>$qy`N;%YyM;J*flp<)9XLKaOn-kvvXx&t8mtxdmmO}J&9gCo zzwusVt9khn$fReh_FF3KPXc~h*i46QyV>t;B3^WSMlISo9^9i($VS0$F>s!cnJb!b<(!~PR2bVUr{@}h50lyS%det9&Y*+s`X}nd%lNdj9Iu~+`C=Q!ciJ7 zMqO~T`utrQ`=ycEsB6)txH+u<)A4j26E-Xm_79@=E&IJfyi<3`b!+Um0&?`3=t7o1 zE_Ch_VbL)ebu9B2p+}<`UFOHq=ssf_Zw%k$ALr2+dxT%wD&NiW^vSqHmWiU{T^OGb z_N2lcj$59S$6=_F*Lm_Z}i1l;l4_KE)NvHnd@t6o8euF zftx<%S`B;bz|q3S``DPc6bp2aXqEZGg@792RYv z3xrG!+%#&QjrYB5v-)I6FLV|I_loqVS>i_ads6Ue>?;C(+i{P!WHi8r7T{%+55e!; ztj2t@!H;&s4fz_B0};S9-dhhG2mBGO$#nbM=T^qgbD5Lv_J1}l)2*IS1avxp8~x_! zBkMNkOauRvu#?t`G@yR3cB`>Cx+>i2(_l47&r;yK`Q6L|rv8N!w)@;_Yd2kWz~|8c zoRDt=-Y>#kDDtOK7ewXw*TB$4Wvmxz8wmbHdC0C|XO91Ppwt|7%t}TMzYA=0BHT0{ z?m*fm11}Qc_KEo3De~!lVb%g_qD4CKn6X^Y=V67R)1q5pk9)K?;BvAl6abt2p!;lC1oQ-P<-Qjkjh zTz^Zvim^k;&}dU5-n^1r|Ixq&z#I9^$OSfwG(TyyR&v$H2K{?PYuVkM?F88Xyw_nPw4pi?Csuus@hKc;flSL;e`^(r;0l z!9&0=2L7BQ=DtPs_&v9%aQmoMVN4$cbV0rh?&+tk+B2@tXdMR2Q%@)sd7~2U^XU*; z_-z3;QR|y!=rFoo%EPy!l_b|Xh`7*Uq3yQ}OozBz)xmmnIc`-oUOi+|fD7eeiNZEI z5UlW4rRO|;E_raLi*RX`UkvnQgYN;(WchtY$qn})VH=GamgAY!-O3|9EPqfNcr7gI zESeFxXjhzP*Ni}CU)+nOw})y$yR9#l;NRJoWAl+zenaqfuX_dY%fK%Re6bvV$@x9j zt+V^0wpZSU?)}`r<)gihWp7P1zyvSBpi=L$w-7|ao=uSn))y<_Z)=#oeR=t^x6Ydz zc=XfdqxT>Ev}g}`0BhGZ+VxPk1#|=G9?&XK6JM}*LD4eXqN1F@#rbzwwU>jD0I$#i z&p844ci1#{YS2poYZLZtPFS1p=H@FmXnk>S2d>kj^tNr?Ni8hzTd>K>R#^a(=_;XPYHUdF=+Q5=bl5hT{G(ExzYxQX&W3)Jt;PvV1k%)k?*qQ#0o5$+dIKK;lOg=w7(qcEBbC(e=nq0J9AOwv#1m9 zJfrX8?mN+*0NL{K_HWv_VdYJDJF>I0)~{aaUcd1sZCd8mm7CV!O~{AU`B%;!ZSSZH z({{}0*fGPmV@BtW8C^SObnlqa<2hk-58XAhdvYxlAGkT)WT*f4p1aE3gvQUXqFv5Y zeSL>M#m|JkUE9<8`ua9xE_UvCEg2ksxPkk&JK&Xfi^KC<5+mpcFHRH>K z=1nbYi(Vtw+UblgU& z)>?XY&FD>NF4Dy}eXL8~1<9Smj_fhH7X`g^wb>c8dw(|NSR7{`}uw3xwBQdVHO`wqHkoi+p)s z4?;3TZTAKh(1NO+IzQ2zGe@a#_Pyv(LE+a`0!1$56iw-(pi9&(X!>Tjo-0-?pWt0e z`QU)B>r#~o5=}Yrf2OYdztwsA)!nDmZE&zp%4aGYB14jca`EuqUhV7$9C4FYEMMwN z<=`%U4^5i`VcP4U-$0*&Zf;ieJ@DDM{|x>o;otNgve*ZkER)8SwxFm7Rki%@Oj(Ef z4M^&iPp9JdT0b8>ykX;XU1q4}aA*}3 z6ry<%+4K46XHDypsOrc#m+Y~7CO^7M^XPg^nkiF{*BgTe52p^E$8Ph`{_0@8`<6k0 zY|~7eXwcUaZnN5G;#`Bx`em|P?Z_X&HXCGeNsq}gCM-bHn*c21Xg5{F{^%)jg-_Eg zx3fLojSh1%;(o4*SIisR13VcjeJpnwGRkHpW3x|wbUWg%(#KT9>8!2O^fWwIrE|=q zWHZZ@D4B5MSq$~d7-3nTr)L2fZ7{{lgMc6t?YZ>PGe}0?wqmX&8KkqeG2=vy)stkV z#RD0hOW!zSWU`TlyCE~?GuZr>>4rTH%~KJB=XIB1&s+V^%VDzOdCebg@9CRpws=C( z`*Ge$`yVa)^hYq-X4Mxc7-{G+RZtBajE4X{?{fNBH*Pvnqj{$5c}(*lZ@^~GKkw0` z@n@sMy?WefGcfGYYAF9$A!FXV4H>(uDY;e~{H=OS;e)h{V>N-E3x`ra-%(<6*y?ld zED25ye@GkT)5rXS!};+eBJ@YyHoN`K=3o~dzxfaS@sA!8sy}`YOgCvpHgf5iMH@to 
zWJ-d!?>YBa&B>`xE*NirwQtY4ns?2pH#0O3RsG>M6HG4%w*{}aUs7V#0?onGrfH7Y zz{r46b8W#XroE5B^kW%WM@>DNiN12K@|;MobGKPMXIPOB^mo!qU-3qo4j*LGCy4C2 zj9oK(^}M3Xm@ih1*6_ErU}Ze+YlXqIU*}H5f!=|^myuzbC4A6YJ+EX8Rp}n1=M_!g z!EU8f`Y3{fhWEb%4$WR+K^bAC`~>dctMo(&AAZI?(+6o?e+f<-PVcQ|YTvGTvd4Jv zdTIg5)SH0KfraKl+OJbfmJYYq^_>aUc6zrdSj{@er00X-RxU?uwDH5{8K?5Do)3o3 zH5m_Aw^_wqqla4=s~+x9^K~XY&kYZ-k+Fz7N-Df>n%%0OOKY{`9ayNsdUg=K1A)|j zS9y-Y()b-%PU*A6QfJ-j@V^6k`h<8goGM>(*Ef8RW=9gfz3#t`4sn&NS`%Sh9Wo*Dg;|?Rtz5Uhh`iu6J$ZI;fQMfgl!Y_l?KB)g? zFikhxw16cjq`4qBtIQXuXLP4#MIu;dnMrHKnICBtdd9*-vCw3N!x}!@7VNRwAJRsm&gN3~I*{eC3WJiu*P#Ak|C{Eq@iU5U`=b zRB>08;kY*j^;d?L)dQZFhYKCNR0Sw>RN-aLgr0akEPgq4JsR{_m7YD?h#SrNi)U*n zwBJ(NJGEf8{&Lzi$EH!&YQYYx_7F13#qQ)1&9qa``t>HOO=J{IX z*OwTjXEYffuBsUXx|`F~v~iX37^&tT_vkKvtNta(*il1jT;l7kR z;$ukhpJ5DKXGavao!!h6~mp__&#BMWx73uV-#g zlQS93u$Rudu1>Wh#HLc>yRlfQlP6onI9b|S6I`MA>mc!N_%(Jcm0nt5YaT|&7!>IB zS0#oBixt06uEc;CgMPYD?{LR>wmyC-8?_R25(tAJ{8+J&p%1uNB=+N{(!Fc!GxOy+ z`TfSQo$<-#hDHAQat-GU?+M4pKOl4f*gHazMYH&hB-_CGsb|~$I7vQd~R#7E< z=Vs>w4}lIu-O1N+J3}ojijXcLW@{#>hvx zK8Ncvtp45BS4p=A+IlQ9xp4cVx#Lt~f2c5ZG3aAUH%sG6x{Sr1(~d61kimJzt02qH(kFvAbbstUAVLaN{Wk6zre2yd*vEPuqvl(!k-bUr zpK*i0xsJhy+>LF49u+hXmBDpac`Vzfd$z6Q2Mi%O{2z#F@Ecq$1tr*V-g~7IR@IKR zsfr}cxil{M(8W>9+q5C~&ei4F?QOm^Rjqo(m@T|Tise*qNV8dLF(_xk0Xqh}R$lN8_KVSFFjpaSwyMr4mUL2K`vaSwwM~b ztK~w?JMsrte)oIt^Kf0&*bq<3z+phZqz!Qhx$(@e^?!E-wo3jR_+zJPZ({Q2)G*Z} zjov4RY>>r?X`&1LV`h^>A7MTz-;%4aVGX$0wC1z~Sfro!o-{=0L?H&tbxnmYCU)t{ znI`@!{O~VcQZCz179npNUa`nCP`C1e@>sYr&%ccgv~^MD?E4#x`8S1L>uz&n)v5c- z#hW%xxP;_tl;zrKKtReE-QTLjy9$1XFI8^|c0j}jb=dS@f5xWBt)&cBTIBGgkucyz zS87mbt+?ymo=avLy;{MXnq^R_%jI#g50>vz`upV_lCvpbngHAcmIAL>>owwE%TE(e zn}hTAH0_RC$|f>3Uv6-IAFG9UM1K%5F#i{f{Fo4gv|tzGJeaKV`Fxx=3qWQkaLjJ{ z^|f~~y~-9$>-nAeuiP7{=Aw#2A`;{RJvdcWtzg7cLlgof)2P3*hk&44MK(H1g^wQ_ zwL781{D-#`EuClUqO%vOaWd82wbTxFPu5f1L-U<~608Nv9fE%_MDS%IhzmE@FgJg} z>RtAUl?^;Xx07$Kqm(2fIj#%$aWRN4?R)ENqLpL>%M*{2?j~a4uBG*T+_m7P?%OX5Rx`j@@4mnPq+C>sEbvJkK$8oHt#i z>Z{rw)T9(g56kW ze;_GWJRsz@+dGqk=%0ZOfnRUZT(d3yaOPPxj#|eBh+w7^G#K|boNg9`dE4=NU$t}D zzL_T~PSpnzeU~hr1HH~UInMPNWT6LMbMIY@8U^FLw_-Ba;rNhiR_Ek6T?{=&G~BLrlrTwx6m_?tFConC@}g; zpBuCH#<|G);`03_)`n*FB{g(tA~clYdJ03Eh@f361WrLa%7!Krrxm&uLfoSls7qXy z7ge33_I{`yf1OTZw?}spyN~YH;XknC_F*H5;rkBL;aG*io_ZtaAiLDdq??JK)Hz2V zS52Q4nNF&3%J3@(2fP;j6+ew)4;JQZ)c);Rn=~!ZsWkg>@r^UWI$rWu;xy%PIE0HD zYC8R^mqk&%qB;upnQZc==T1-R$@!7p%@+Ia24k`=K;xImTc5&m3}r9iZ7gjk^YyZG zwX-84vfJ^!P%gjWISSgG?;TZ$x=&gOwr`DZTSQB-^HEj`)zVkw~L@y z1y+tx7wv}L1W8~p?<}`PS264Oer>ZFYqn%&f3RBC6XcfG@UIGs^pZ4ZM|YdCOr*G+ zjopk?_oD}=;K(PHvK1jb=d?Q>(>|7dH`YnrxBIBzFj+Tr>`BApu(gP?vN9KS$TG6n z0Uav^F_Tw)>0LG{dmB0f2o7uh$0odc*Y}nbk9vD*uZueaP(|yju z+s$4XXOD@ixS$w7LPH*eRp^2N!3i$&pe7%I4wt4j-!aYbC|H)d>Ykdj6g7#M&;?tX z9-S68g2d0O?QvpoAGMi2Q$(gV6BI{h5pO@=gBbYd2&MCC_^f*5G@UZdPWv_Qy5zRm^PTNo1j<gixp$t8Ycu3Y6rhGr1O}1_I=xVt>dQYs&4k%SZ*k&1A4HN zkghdZ0Ma=O_wSQa7pa{RqUfJJ3j8!{SN`A3XMDH5`zoXz*kiMkXXMn2S%jy>< zhrt4Iukx2{~a0$2Xl$N`pnl977-|L+294vw|}ASvv*x)`AstnG=pJzEPmWMo~MT<&-9~w>Lf^ z5WA;Ft^SDVO-S%)VPS2rGT6{fT2yfMx4A1&SpcueFjG*Q*>3uN{DhYNvLHm!{vQam zD7y1u)Hj{e7Ey-!aNML*{!K{LCn}5HtXw}QUdy_gMs`ajnxwFJI6|gKZ6jmzM48(+KYVvv?LM{HoQNN`hOU^hMW+qoJq9^fd;53UJ)tWw@hJ zhzuK&Bi50rA3AI{gU1rFEZcbr@-%*)$SK&65JT9671|+hyEM~}^gT?G{}lzT(?J-r z(0@tPB`MI`n2G^pZ}p$>r{Ow4e;LC{25SJi_vr_9f&c=}#%694kx-IN{GqF2=9o=Ia(%0s=zi_r0 z=cq0c?$Um3fvW%FTpmF3{5rUJzmSiGomgisuLCvtqhqY$>RgKvKA2Dmm8S7$D;qM^sHaAzTY zvD!P9kE&Vx&#b)%rOc!%NR5bGA2o$bE3Q3XIrVAu)T8C`knE)Ivibd80ZXus2>b~Z zKgj~L`UOwQwP*{oVHh@Rb>*FMFMJfRfGL4Ek9$oieFt#TRnawq$NS$8HOejhr*b1! 
z*Cg9q@Tu?o3hdxutm|K8?fi`|25*zMreZZ+9AI2Au~_q;`E# z!BGCH-_u^P%kt&1T+z`_eU<^irz&^uzzbz|<$-^J1I`a(K0R2p_GCM+f{;WRzggGq zZWr$oAPf~YX*JJ}uzJ(zO9_kQ3wr;Xd~sU97TR%|PRM=ZD=Yq4a$?-H^}4&OxI_{` zkbUhz99NZ7)d_KG6)2KMTY*$qJaqjQ()!Cl2hJ`g~o6>xX zjjWh(sHRO0nU@esq)^1cjV}&S8++k3^zXbVt~hzvCBfqAtmHJ2DgC3M7;WagHEFP> z?He~pU)cMI_R1wK>f)h#?W*xw5aq=bouTcARi_PdXWJ^2Ke{*iv;%BjN^EmOk7wGn7mrIOwWvyP>&b(F8`K(MK$M~q)EFJo8VYV;N(^tXe3?KKoIJ6MVb}TFg z(7(uI_Y^;UJs8+CxEBZVT}$uV-!#9M_-tn5NZmQy`+_ zM#`YzIDcoVD&pmKHYqdCD)zFE7Q~uF#M&qM@((t|(BSUo#rsX)P3o@=5sAeD@i7|) z5o0kF-(F%+rD~@~2B-4k9@h>Ny9~xVp%lE}hF0cV{~z%=4_PZUmpY`1or|jX*R`$fK^~JFl7vkK>N^PsV zzNH5K3y0^Hk*}>t)`0q)q{(j#llZr02*G*t1Xg$eQwv{-`}AeOK&P}jCI?4B4r$o@ zQS7le_axyFTqM^+Lx-{0bPcD#|2T%7rkty@J-@jDMBxV%yc}bJ&qzS z4v2#R^)9NeEAw`ZTHex&C(oE0gQv4RX(xpdIm>&ma%Ohxz=9oanW{~ztZqXSESiSn zDM!!SN?zi`stX>D9T@b=!iv!gFQ4XT+@f`Uk)R!5&*1gTc8!MDY!X^qUJZ{ucj%B@ zu+C*ggsjJHm>SM?>^?s2o6+~ALtCwH+$QHc9529rlH95*kEdIt^uGpLWPpNOn*TH} zz1OT~8ZqWCwV4gpM6P^!+U7&3Eu;7&BCj_NmS=J;G*3RsabhQE^(KC!>z=kU zpSXDE?UeaV`3MKwOZgO2q4rjo8;C3(s@o!D*CV+#@nsf0@?V3LE9;$Dd5P7+<<}=F zS#`dPUl&ws_5hJ~ntn=1BlcW&m?;o?zP9b@S{ign-wTycY9l`IhcwBQ_a<(VfhA1rs#a@{Q= z@e22P|6Yr}@s%>;rF+w@$MRcD|4)~w7Ff^c*Tx-QZa3lPf}bQGG_l$q)Kg6)LGDK& z_!*YGcloYXo2eM8?le=ktWu?sQf7(kN_Y?_Q*7-2{UVE!rOKdoemqg3(GJ#jIGe&- zNSnQo81~PamzSoKY~kel+1stJG#$-ZdB;aS^JVP6bf;S9W&c^H-22A*)wFrNG_h_B z%baP>d$;=OvMV(G^^6?U5AL%3>J7z|9(w%HRU+j6{Y_+CRlZL7*zZV=sCRGK5+W_^ zq$6(*e`Sqp@=>1~BXU0H8h(hF$J``7vI&)2@jmJwIU02Sc{I51!1Aq0EtnIp^OfFd zqMa|s2G-Gwd@QuVv!d0*NAzvA1okHvUqgdQC%62lf{A_I9JwaW?a^YEf_B_jEHp zF;{r;)89rvw|e86?p`^Y{k*=qYGYdhBuEmwa-$H`MJkfHc6Rr$S`x;}WEXx0RnChn zaBhKG1LntbE=QN7=}ylV&Y!`Gm7*T{fMsI6={%Ha#WHuu= zf8BKaGUzq*P3YrdK4lQeMDD^-+p1_Sz@K0MhO*p6^W=*p2Ybw{{UZYGas|h;K6fAi zc*Jk`1yeP3N_>Z*;5GPDrlL#rLkGbn8|Ef6mhU(5@{?P>s+akw!4MRXpp~Dzl`AI} z%j!E7vQ>>^?Ty2?K)Bh9fm;1S-2TZTcaiz}eq$=2x|gL1a#*ArC|ZFc`9Pgy}0 zLNWi&swCF;Sw#ZxD!+N4mOa-4+lpd%pl>`q|apFZ?HU!J=0A@Zj28IW8*FjA!>n1N;3i zOC}H8%$I82V+@S#_fM-t8@s&d$)$r%xqXnp@B#^~?d^YtthuiHIUjuK34X*frXU)4 zCwxCAjwwx{mMme*Us$K(sI;-KxeedwuFIk-Zs?srMiQ?MYX4?bCxoJE!H+(kTaU$< z`y?L)&!~3PTbwr7zS~&vEbiOR$udR+ypgL)o?jct!{IC-qnq*Ei{_-c(!G8SvIgkk z5Ph1jowh*1CBN!}$8c_nAUV&un(U!+ME!ab zp9l0|t1--bZxD5~6_L=!bRy_-{Ljx>Z@t#6|xZ z-h2Hw(Hv<~%(udqigS1dF3?=aP>$#8{YeF#d(#{bm7GXdMQ+8}RC&?J?$@ue0jG{f z_<}jKX!>2q@=+4c=thyx0i8sLVn^;uzSQ@&>s$h zG?|Vuc=j>;oJ#cnlBFKPyiY{_91!T21cG2HF{aL~+lL~-s%2e0=GP%8}#sG@ow(&RN_?!Irf zQp;Cza)veR6ekm66un<5`iA;CvpBV;LdCgh;MT9VXzizSZXG9bQF72rQus7^2A6Ee zc}VF@W;z@%l-w0BzljDANWBi6j!E$eUsw|%gyNy@`I16_;c9TkVoQ=BJW&?*$u{+@ zAL2WkvjQ}o1!o^|@52hh=6o@i%ayNH50>9*sgqT}+rQIc?qXoVjGw+7+<~)h={gP@ zE1A| z-Q^dG`x+Fqen5b3$nNNu4byWp8+IW&O|uMAb}d~xRg1_Vn9eB4e6W-NMdEfhzxW2P zr8+o`T>J&13wcoV*?z~Bi$RbJ?Pqm`Ufv-e$(+}y244VuxFa@SauxTd{y~ecgiw|5 zhqYCj9R?o`)@p7~t7L9Tge49I!+<`1Cd2o3qL-1r`lHNB2#&+Bph)%xjBDKc{n;vh zO=;4^PJjiY>=#vWqll}@r{dolcw~b?sOkmPt(Y*sQ&vP2Mxan$pa>w2SaH3u{y{kR zQ|!nn@+3!p&v&iO?Pa^PciVpSk2b%KW*4*2b$eIEPjY~Qs$HcZqf52N_m-o7x=XUl z<*EsehASFaSo(3%gjlL;j5_|v;>{Gl+aMXJ6pOTm4UAzJPs-W zQaS0!(9)f2E;49$;$J1fl4IXm!J)mCkE;`=d%hme;FUKr8zu`NP8tbZOYOnL5)6}% zC+EO?rMWHSWS+f?N*_-n`1`7zdI*ylK@0kCNUa!z+re-VAE0d8;<4J+-`za7PgjUe za7o}&jnx-0eCg$dz3U!i+GZ6e{nM~_xf^}a5`>8VL*ef^)8`R7LL3@mYlz%>{ARS# z_Kmg9^UDx_eoiu#|Cpti!Ij_h{h#*SYF0}*5y7vdi7Jr$3)TTQm!D>0P3S;_OH;aZBO zO{1117I>r}efPXik30x>yG!o;ANlJ)tL2ffT#x{#uWj`J^$ z&o?~0i5?OLdZgI3qzU2+5Ikt$#29Zeg3erfX&%Y>{t4*>MV|WkVfP1I3&Qx_)5-`pr8dE;iw7xizd zBlt$wC|1e-TD|vZ$9?!qd4cFPU~7llLD7E~rt&SmaCXT{ve_sV8m0?Z&en*6`$id{ zb^QguEaP-FSa})`{C1k`x!}H>9utouN+|SC_2g+#S3_(4egUyEdTk+^s#!ph(D&9r zhryw%c)#W`mw#D;xV-!8Efz>DmJQyVzLc 
zvz`Mm*ShYwQ|L5swnWAJKepyGqfipFu!?iLD69I(aD|Fi7}MC*UEhDQMYgx%%Da>`klE-D$+T z3He^PPFIqMm9gC6j5Y#Ws@j?Mb7XjN`@MrAfju!rZDKF($_0=-|!9+W$`K09S2|8N5(2vM>ruhwjd}3V;b(;4mk@0yH zN*x=)y=fq4;-EMFdB*CAx?!&`^eVV6`2W1}xtps)s>Ow6$)5%NC&?`qeC&v19R79b z-W&koMlSG6+&gPhmKuQW^EUeTV#$x69Hoz?t5nFZ)(tZ{x?tmE*BWQM&*s(fOnoE# z`Yu;-15mEr4B^N@bUMpo!Gh7z_($A%lAPuzAKZD4W~~eY~79NO@n<_ zO2_~d8KKIJG~&~&UyDBSug@&ZSP_kbZ1V#9AxIfKwql7SCxz5-w4<++yG3@KkwphU zZ3_CUOC+gw_2x@K0XJ?-!S06k`SIhY2AHGjR+rHm%8`(p*g7J)^hcsRM1I_&Fmn0J zvc2m~?by@w**YYPXb}?B2xDUlQGc^$^KA*|wL(koxmsz^J^;q@q@hA9ROHh0aThE@ z^bTfZDRW?(OB%G-EK$TI6flMg^6|N@iOCdDrV&2R+rHF7VuRt^zbA zvQBix99*v%gt*%GY|g6HV2Q7xCO?h>gSf(csta}Qrk*G5jqwM`f~NZ!JKbz0VjYly zW*EL-D@6JxEz$R(754+r7zq-jm2}aW#?>e@Z`~)IJMBuTFrY}uc|&q?>t+C2(OfxM zrEC*8oA0!?BC?qYFsC|M^(!l#IA*|Y+@beLxap}?`~%3vt??#QbEQK)+wqeY1<#w# z&xb*u_2G>l%pZ#*zv+4Fb{-^N$A)?ANw&mGQl)uWp`oC1#8gJ7zhFN=^Gp^A3gbAP zugqS&FKM*IY;E=41D11T#_Y@^Nag*LiJ|$z!}J%o<|M&JpMQJYzHNEz{L}339@EvV z=3f@i#AUSHoc#5cLBr9f@OrKCm=y^9@4p-MThi*g4EC3Rorw>UtS7^*b27XJRl*0m z(_%ciQ#{wxawVI{ytsm$vTCm}<9)7^Np>U2VwxvMVU@TPy<9t!zmTQ>=qhB)qWD}) zL0*z~UlHx%ur^W##PWH`$;xiHzx1{r(til6C!%nZm7g;YxFNVcD*$?|x%T z@h-H9r`KGeQk-AEhwQRZoBn17{E=`!cnoTo3yNy9|8H78-JwG&Nc`_ke%}#8h&gU} zFalisVT}T|6Bl112fU zX1n+dAv&M-9v}Iu6Tu5S#4V(Xw-}p=> zt0F(iIOQ6x6uVK!0M({F8cv=c9EQSt3c@27n2%UsAX&l>D{Q(s9eh2v+o|Crj`V(X zko)bFoBY_GLuXMr#!&t2zw*yJK4(zyz%p-NIiz4QwK!F=8TMyYpZ9(r9e%UHX2=JO zKHh-ME<*@%TrejUepbrpObz~?Q1$5(mk-}7Etazo3bNOM6Ud$CC<0agyymNn9J{l2 z9$FUu-|}qBVN7xvDB0LwOgy)fH{ByXmo(EA12Rx3cA>aF$F_S44>i&~0yCmADKGm@ zmE#tETt=N~TUB|kIeTXq{9jkYf!_4VRFV5)h zz5Ru`1uOVhohjUTnm@9dy59Qy)USi1i4%~$X8d%b-pl-p2@H&To3Z$lp0 zj{ZHL=wK3~+IvB}Cpzb+28J_D3qP1OR<#^@YqC{n*2jO2DLAa%7XDFf>3QSI+Puyb zSMMj;nyX$FWH#0|4Vk<=VluKR=xjiHjNjmZ%Ee)gJBWXWdAcoZUR3IDY7Ja%HcSh% z7tCQE&gzlT<8O{o>l(_*FU$+fHj-A@V+pa2Uf3NtkhUZsvcRY<>>z1s!C3mUE@FFaOdB}48+nKbl!#0SC`~X4NFyQ+?@P)0GwHQLShT{^7`E-b%LmV;=Uzvi1bz}K<#^swxjrk z5UB({+&M{RkqXaYel%81_+g6UM`0|T*W=a-W!z`kL6tRW$p?emNBr4NgCM$}%WL%b z@B+=uo){*r^#)m*%1}*-GTND8@3d@>$f(>gt!V-eY|dt7?)Z>LOwYq=O6u2*Ul7du z$PRhq*A=9L4t07e7B9zLof}e+&YlaooIBu8<}Z{3nAH)7PV>drpDCX#Gf>OClxr5; zdo?A5EPK>r=+Omt08*(4`PmpUJ^0Jbz9loMTM*rEZe=^jk6ryK@&4yIo|&t;=>DCC^R4?>)cp#TcrLsB%Ijcaja&QIz(e9&5rKeQuch=`#6l&IqtU02`QrNHua zzB-TiO{IO$0B~=yYPWvjl{aCwiEq=qp?YK{GnbRsGJ8w!-z=7WboG3SjL)C*P-CL2 zDfL@(0)eGM8Yg*%fOjv;jm2m>S4S40J?Vsk)5auL>zcE@iWd-ywwB(GV)tQOw2cgl zQFUd^F52;?-?XUwnlxx4VfwJ1{)4H3%C#o!uDRh2BAJD){(xx7K=xYAkOm_s0gtpl ziwmDhb40wF<4U4f7o9)#E8R2MTo{XI9=1U9C(4wws9=TphPr*AUg3#SPKZ1pu8rm7 zY3Qt<4A^T;>cULAPrJ)gHl^v+bE)wpBC4TbriIPPC@&oP95K;EIMxK%lc!E~mHPcK z0S*KijTP=@_$G_QOXlhisIC<4=r<>)_IIgvssZFjRY zyUzDblW_NLQm|f(R1Mnzy&L``rw`;`l4DHdb=~7zWEhUxZ(%lP@P!yZZFFn?H5CbS zVw@c^WSQbbsfwUg^tB~)S(ZPoTrlk|XOL|jSeLJhtQ1qP(;%`Z#fOUcY(VF+<>T_W z?@@Arwp)L`d{5-aH2JCiPV60zviV|dsfwpWt=FEt3bft52?-*Si;iOb7vF_%MwGgc zpUgCJc?Ob;xYhz+{Y>E=Z+?b*DF$Q2)!n%GD&@nhcqyf8!6?j{^Es^yYxFFqTlrds z47J}jTfd7(Yd5$eQ4n(f!Rdtk^xdVxRBh|rI+oG$biaq#6`JT)FTUQ$+U1Le7&{dt z3;%|^ne)CLNiFKX9-#no4RLT4!o@b4b2HOv`I}*oA0?^e0UN->omX*D68DFJs$D|e z;I+h?SVDefg8JROAKDnHsVyT13$@z55;spDH>VljT<)3Xm)sNSPDkeXB7$rVyiL>Y z+xg$M@c(Zj^tg?+$GJed?#6}UP#QT83+ zr`S+H=*M#QYl&`Ah-LiOkkB@FHA4z9Erd`U7u4nxV_eAdqaTsC+#x^cD zVgm;Oy#t_Pw7Vc}tpIKW_l}~IJG}C;_5Ke9UoqDBmJ)W+)Cz?u-sNwr9~aOLjJ|vS zel#D?&~^1tyT!g^hUNN_0GXCp4nZ=aXcKR3m&yd@9-xDSG5IXvDNG20+P>gXIbsI# zjO0py_LbmUwGlWwD5C)juK7YF->OeCznnYYvyAN@_O*%~?li{e`oz&}h7K8~#v#8R z8-}2JOsXMXyX|7Ud{(@T>dUuHOhSy&DAZC)b$%Y_%3iNAGE@xKck_sCyh40P@cAh^ z3b=vG>EydE{kk!C;ge-s>RY%BOz@^Km7)E>Uq?olcYtcpD7isUv{BwWL0>FU{e0#F zT|98wV@WVclH@s4gyD+d5cM z307Y@jZhL1Whb^4~#74 
za@m+1%@+zwji#QEy{O_vqw8x=n;`x+Q2?D&VYLr8R!4Ubug zqJ;^TyJOsYKfwh1&quO!#lM5{d(|RNNTHQ2u>B#Ur8w%zbvx_%r45w?q( zNm^+-t$hDk|NuL1ur>h)M1#7s$-;zYRxnjhYF! z1^dcOcr>UQy`&MlSnb=7v3)Z-c zVRN+1=~`=mlQRk+?izpVfY-Q<;ivIn`TXC!_}{2zT~NLM{CBJNIpUH_tN=Mw0tNY+ zQ7gZq<#oGu_`Sg3;u~mp<2BSeoNicQBF7bzX!=q13+k(8RHz8vJnq15x6K-$7N~P~ zu3EQ?5uWK{mBD>a)dcl5ro6?@j<=IVT)mNZdB}>eEtiH43JSHqmt15UWc7q@8_J~v_P_SK~3Dz zkjbQya4#p;#6#noc~|6`L0n zjqD`2-JL<=2Z_SPkBeo2htO7%*zSR#2xESzlwUW(FgQ>y4r&(oVvi3?7n&Wc zEk*wmW7Oj)gA1jDc$5Ywnz0#f^G;W{u-tW&TBxgdc*q68_{4}?1i7c_+rTY=Mepq9Xx>Wz93G`KnTPIV5C8 z(7}@bWlYenk&%7la^=~CTWd$r(eSfsQ>YJZShP$Bk+efdGCL-j6rIG0`HaTDh&Vo` zJc(hV9XoOKMzLd#lf+bA;R_lQFE0X#R9zX4Cy5`iK2WDJq1;n*oStb6%fza?avgh6 z#n16h;>V$vom5@L46t7`CdNF1pY;)mY2Bo5YTkU&sMc!j;NTRi?nBf_3L22nog7za z_*O}E_i*j0yZ+wp7LhjaA@(2S`l8UmB9zia>qUo(vZDs|hcbM^3mmvl8>9a0DWR)) zS1M=BfjO>eBSmdfg8~(B&Yb*#9NXegZNH#@Qw~_SHo2q(z8k>0J`_7r*x-Hn`O%WU zI~bOP@CHs8H`D9~tcQbZl4ns!%>d!d2Ylt6Uw+ZQ|t>?LG^CoL5Qo?=MdJZ!nFtXP>P3px_aOe&nf#r zBih7cW z+LHV7AlKMqe-0p#0={|0J9~eEI-*X}qAAR4Ut<5$4cqiawiCy#$uRsgdXGK3FOUoC zxYv9!XzNrn;tsy>sKPIEKbG@xRtetHo^$DPbpHjtFUfql`qXoev<115YWCaHzt@s| zw=jOrHD=|^w`755w=ixF8r=aiU9xX2R9O#{H)z0Ut=1;a^~H1?m1^*RUb`|reP!K` zv}+++I!T-}D7`aJ&CzO=3+BAU%AR(jsZn^TW-m9W!l+#C3)-y-9!xPk69MvnSzFDc zWzbO?6HZG=_5%KjthL`nsp3Qj48%7N`~CQ}t7K0$TEP<;`J_36=#HwVH_2=3y3~u} z4uwpez&`k>CAq(dC%WYU<@W~?-G$NroF@x~qK2*eEOGo}$+58-)kDNotpoHw3FKd% zuqiW=wOrYi&~dL?=BJv~cVc}?GABK_>*zBkPByJiPr^x~JMUXGs^7JCc^Z?o>`^^c z5Ja{oy>YGH4jY-#s5WfPmrJQNIAOFw3w5%+S5{b^Ic{cC4^fIcKEa)ojF9E{2k#0{ zSc2o6;O@qw#kkMHh+B{u^)0xaz|EX)jcT3Msj~T;J{GsgyoWJfn>aCPu&lya#=s$k zw!ht|`&d3Tx&1z#B=jKS`n@7V9LSpS-@gXDW3$W)g{7Ln=nls(eWjl#i-6TRUNWA8 zulF-BcyO}>cCuB<^EmWVC!STZ0z1jONacbjSNO|K&cNUZmR=x#>e_OT<@53sfc$KY z1F8RrDUjc|HJ|hNLD4HJ$2`S%9)hb)$IY?OrZl}6rtCd(b>`@hQw>?Fe%4Ka6u3*O zRq?58v`nyQL9FKr=R?svVF3B~1lZaT-7%1~$G*y72RHlaNmS^zSQ))S=HJ!0C!0a` z7nEUnl(J>*yckq+7eh+)VMl?{R-MnEc6FkMi~gnm$EI6VO(2Q?A1)xj z?t^1CSfw>;Xu0X_mdED~omRP>ryd`z221O#R~J9pLOd6HSgyZr!1KU$`GL#E(53({ z1>g@$Euj+}{|MoSNkbDj!M{s$Y*yqc(Qi3`x*^Ys@5>eiN?vL#s%2&8f5#|I{9 z$cr0ttis{(Gs{cp_T0I6&pJ)T$eq-{0~TBWb=niN3AU~7Nt6?8_PkMaV$wTAY&hf_7=o{`;Jy{PS$c1}Q}{L1Wa z6`FS|fE-mzI_7MVXmRfI4`?th)ZW_H|8p33#iniIBsDhz=^J;)YB)I=oAUS=0f7eIgU9K;Aqx4g@(uWj_d928K5t`9~yo z+f^ZSR*GWNL8FJP86e*0PG`}a_W%9e9HnS+C;Qc;0z?yZg{lU9Ic!DkrcAblEduM; z+A2U_1?#Y)hNxGU?}`%M9a>-Q{WSi(i$ANDp4MGOcP-E!KFEpZ1VfO0Sn#<01p=(K znqEZLngw6(s!#2y!&sG=mgvs9MT2X@w(YQQy%FQJgaz)(4AR4QmsmlDTQw_t&71a- z<2yvin%2#&{Hl%s(>ol?#V4NGTqyUTEV|`)GTKwhHfNzk@19=knrF+Cwut&nPKWwm zo(rkFy1ZO^aXRvGj9U`!@q3{;?{v0J4jE8m4w zLXPAi3*PK!kp_7MC8prHuYcl0YMYE3h9Cifwc>rqzx=0JTc6BxuBsU+5qD}7u}(eO z)~Az>VJ9uF*Q>~fT0#3Tl8&+N_x-z1k83{OWggc}ts z1vqF>FKLeM&hEF(`_r|vTbIiOQ%ch{^HoK@{Y=S?=vipd29kFtEf^4*tv_S~f(+4J zB+F?T)51+Xj^zu6R>V-uB3EPiRjk|&o;mO@pK~Y{WimzkLjwxj=5NiLR1-qcy3a}- zz)uk1m8_jfpRUgw-LoqGeO^v#*i6}ucA9IJ-S5FX;Ty)5X^Xd0UbA7!_xB)+|2+5< z33cxXlN}EAk3L6%mYU{`E152uM#oH?|Df$Iv2L+1F>hcBOI6qDrt;Z0s@z29#;>u0 z<+LC<l#`30bTMyjHe8);5wa2kcNXW5b?;s{5 zkqC$!$<|0oj`|2w@L+Mq(!vzn9}Xcf1%F1+QzX_WM@~Wrn>Bl}(RWw85OH%@tJX=v))RA;p@1Uk+1qcyJ({FiY5s8@zw~Tgh9j?JQa)= z1p7|Yn~;K{L_)4eX7CF=VIG2i_@xg904Bxcr~=$BocheXA_{^vX~))P1@G1cT+-iE zi<3nBL|w$Bj4~EX1o4F0`0lv75%G$p{`f9bQ3W_%WVerei7&`1=tWNim7uiKAEzn6 zAN5EoXpIPYiQb3vD!m6AD?*}@q9SFLpiY8BE;F_GE@Milh;=+us~DYW$LR(zK*{bf zs*wO3{6G|<6CoeSuYru143*Q!eFPcM9@MFlHJ=1318`9-u_GZ#^;$SKRviyrh4yB`K~igG)1?<2r1 zN(U31jpTZC!xWX=PC&@!fOYs(;E6PCe=l_4=Zp2%~ zQZTQc*uz4B=sc29Q5*b0kEsH0j^eMpnsj)d67V6Yhme-+pJ4*Kd zz6)|A%1R=!NT?DEm{m`3_=;qkARgz>gRn_hDgqSV`mH=0<=qZ1Wy@ZHRrw^m%Nsf# 
zpTkkyFd6@x*mav%f+rGu_fRB@1UGm{$JcRiuDE{OChdEuiU*hRZ1YOTKj%5R?mF2C zlylcbhGOqSW&s65KAI^U6T+qoCgsZC82P1-y#|K6O{wjYUNF@Ed?xL1Xzw1f)ulu81H-njxrkX(|>N z5u}TSDoP}PO7Fcl3yO@62`KwaanyNl}a=2iv?1$c44H}$R&ULwF;@4Sf zxsHGOF4rQ}>e6iO6z_ljy~o?zt>Qrud3&eC+%q<$D5l7@Dro=n;8?L~F4v~Kg~vjQ zPwG?&8a+bDKx`qa?hQz6w#jXGEElZvlhH9L- z^39L*?blR|m!*u2OE^vzme43v;LNodZKuVz95y>lb2u(sJaUhW9>TmKrqZscGGbb0 zP_QTGY41IXqK1<;__-$YB~^P{ zgm|3bJ3=k~DXD1PMRot~lxOSIR&n6wwujqnib(?d{_&`2RY7|?G`i?i0DCdCtGq!c z_r1px{Ya%vGs3(Z2UHG*3Hd2*m2Z|ZD{4M=t7&5{oD|bojh8b$qu2E`doWk&<#^|47N4c?!ac1xl}4P;>H}f@iVtGz=VvgV-ebPiF-OR+&a~Vhhr`x) z+sfWb9<4?beoMe86SxXi-<; zl`$=YBYFN}*R5uy#<={-0>S`%+dZRvf&G`q{MuksCTE(3c4&JdLcqxHT>OHqD3iT7TscCA=-QsF(Jn{!~LzC#@ZnGa2rXh8@xX* z9&wBn;mjE?Z^Sop2rY%;+V-}J(u-^*DIz~mMPsk*`-ZJ6WLqNaQxbf!!l(Bh?;JCO zxP4?>VpDJD8NbJuY&yT3dxljJmDw(5^D0bP<1% z%VWL=3Z{gw?r_0~`nwpvdgq#z! zJTjWg4<+-WGJ$R8(&bt$;;PPotR08qv<0Fnns_}_U3MKoXYoO~MJnrw)};8K=^k#; zDwMVGy>QAOZrfSWv626u2W1waa%UDr)xL&}SqwQQD{4<~L$9dA`6ntto<fQ8p@q*`H?$A%1BjFLMXNTNo$%=k$d}v>=;tN;U z_vg6xfkN1qGk;0kx82bUF9*A9lD!YLDIHXraJCa*{nGa8hamJgBH&n*cF_QfN3)78EL2pPn=gDdpj8AOme+;HbQ%#$X3=k zSrwIzVeT1t$&oz5UduEO-Xa@Ws~i3b?|beaOg%HJRkgCH@@uPmxF{j!QJHDH&Yo-I zKCmGP!ms+qvm9^8Xrp5T#b?YR{JJVT$t9XY_Cv`Ni%@u((X2Itqr)x{T5JNQW#$>8 zh96MR3jN))U{7^CYucV1NHKip(sUH7<1&kU)KbUFC~G1`Rn9i;q6*Bm?i@7VYZgZk zFo99B<>An-BX`<#wjb}g10x`3{H<8B;}q!ILulg&qO znPl(zuI|x!pRA-it35Qf*fV!Zlh3^34R7nVov>4!|2*hC8>#eiMtdl0jkBrR*$geV z>1{CX6!v`|e|x(T3Vq!SWhb9OmRp3(!=ar=irO~j9`C(x8ha)|NOk4FRF<64&26nP z2BSIu>A7F41oiJYbZ~-G${^b@!7!sa**$;DzH0#$o9~)$m9|VVgd<*I3X6J$jr*h? z%-yrmuBzL1mRW}H`NyMNKNlaFw{>}YS(bF`78p$KsS5rgPsiTgqwv7MBSh5*IYaM) zEy|O9=VEj3K=wZPnU<*nm#%z+9V!k>>s9MmPnR(%uTvF<^}g{0KJpjQ900cA`^v7#2tYIw&%V)P+(V z3QLZRHICabAY&`jD6;+HG1puji$5=q`P3y%8Alo3+(tP3sKPK_4s~&8W(JQt1^uA? z54_7w54R{4V_3^aFcuX2(J+Ju#E{i1uo`qd33D(W=HM920@gPt z8QYjU4!zE}Z_{<_4#iw{K7(wS zv8-;(33ccO%~&`vPI#{sKoehRIt^&idZ^cFHY!qw=vuwr)_-Xv4VzMO9>QUtYEB>}uOI8M^%XJm}0VHIIpY1#jdlFfNXjJJHZSf+{x#G(i<^B zgKwWk$q^U=8~4v>OS=4w)6Qr*;*xtNLCck4YTu{`ho!6yHPxeTu_B+wpBS^|RGIvK zGCIJhRaBT~@W)^sYqi&qtlMJQ(5Mn+ud|iX&rrR9pqI{`k5I7iw`<8h>d{8etyA4S zWa}OZ&9Lj1X|7Tr)L2+@ag15mPPH~wnEcqIaTj1~;q~@hfnyHHDM3pPXJOBRPVjV# z1S!`K0C}*91%MV%r0)o`N>JGSCTc);4n+HFJm_@I1VC~6garJPc-@n?d7w$qA$Q_C z^jBw-@o$SRH)MUGreUx^BhXI_LhHw&t5T~@FS3!+$FRelgQ>c+Fs@YO^HgWe{iIqr zD(XzjU}C}R-7}074-|dt-gM3~zAYKB>jtPgfUeL`z9va;40HdMeY7E372!nN9i}m| zCwCsXV~*bpsNmZg-1i-Gd+R2cro1-zVB#>$Kf5y&pMdqoPh{u@#-$ZtcX z&I}xjQLk9VTxckvb9eOa!Bnx299q=0u@SXsa?vN2=&-^3_9Sv{@8vst&fh~_J4#r;+QHS4P;cO|i zD|eGKcBp}?+--t}pnk4%5F2XKTLPWq*reLFIwx$%FsV3v10Cr5ma%BY1CtsoJuYwN zV&e#9GVW(Sbi%EX!w;rHG6uO2(mr0&b;~eOmrH9POHw{;pRQWNdL9AR8$L>LMr{W0 zh{d14O&a0jee05}I~e)ZacZ^LajXYKcMRq;|3By67PX5niTaV69uA+4^O35vh3Sno z{RX)c0A~iTN54KL0Z<9X%B7sAV^84NacK{vP5JM4%@AT&tYDow7AJUnzf~zvRP}3E za>!fdL4zn&vLdW!s%yBEtVRg}6#$blB9tJ^yVA)eEKO2ZtILuGhTzgvei+sCgRY`_ zCEbYb#^CrWwN@P4F&99yXgyKu4+6g7LMEJz+gZ_;?39hLPDL}9*rF)kO7ky73L;Lx zj{*9)j*G1{J=aGr%Pj-71D9+LbS9kq{HDIT{J!!4|rK0fN zJ$$X~`EgiEXgY+{nrznut=J+PM?i+(pN=6GX}Q_!id*znXCS%nT+qG%OY3sWgkAJ* znb&&=aS+8Mn6McCJ`@iJybB;6G_dmFYE?_D%TlD!4j;XJI3J-rpEtmpH+vosU z6J`rAXI6h@k+qG`UH|?r-=dE7?=263Q5pj31jd~Gyd+i2qdX1M8vtYX!s$lC^Y|-4 zHE0`(Tiv5MFe-qAg03Qv9%i)>W9eYT;@WtfZvYvB)lBCFg_rNJ^|w_ ziK?AlaX4d^10;86d4n7vr=Hu02na80{zBC^AOq|o9uVKVB0z&&+ER;?12Dv8cBwu( zf-t*!?`@0BAjAh;KD7xs=Lri$rMe^o(y|3vMDEfoLN>7X)^-h1d(HCJ#}=8O(TLDu zzJwRCyHI5Cs8z8DxC=2ba z%P~{jIFI+sL(4mDywV3#fG@+-xB%mLUxQ!Q_oBcS4mpyVV&D^Aink)gqOfmI;_~|G0a~QgXJsMi>*g9yCEeXm_>!T!uQ2u(~d%IEG zIE*`s&DkKT>Zr2O>uYQ;*%O1m^lP& 
zjKQP^f}YHLw+Y@sl`Kk!1_@t%5;ziuhTWRjr1zf(oG4%d=NRctn8*RO*wp!}4DVD3 zB8-0YmeZ?ma~Wwx@^QrD{c$S#dFxmC?~mWzoTVW&&a^{cw7~=-=t6G*6PE);MM4v+ znP0a)^eIlr{pKp=Z-qgT?ho9zeHGDTV%y-N6E=0vCcBnDdV-CNqnMTgXEBLS4o@`) zF+v+;H@Eb)@Y+v+e(25$19KgduXid~4+7eeTUZe}CIbENZZTwyaT% zEKKgQgP=yhUj|htF38dx(P-Ej;I9clEP9GM054;5Wf5ml*P z`FN&V#UhT7^61!o5*Gkaw{iB_gK4%SHn6oo^sPa%6GxDf*qI>gdUG>7%$8LdqNq&{ z9X}T6)`IibK{UlT0I2wl@-h&##ioMz(v0x8Xo?WbLDb7IMbIE!1g+fdR)lEXW?Bc- zYC(o*2BlJJJ32Uk9-$8~c3*QZ*pLD!C7XX5dTqzX(}2fZA~gb`l|w1V1^uhIs@)a&opTY4 zbS_*|Y)~Vqq1L8ib<+?w8iZbj_t3pt#i4o6T2z28wgCp7AqZlaU}wqJ|4tK?Z^rsv zNU_WISGf4!F%H@oCl;(uL^Z(58O4XU9U`GXwPk)xay9uyI+try4 zEqFWk)!BjpJw)jdN5j|v2_A~XPXS#nRJSx~Z2-&BI8d}I>M_y9wXjTD4Uv{M6$)S-XsdN1 zsMkQhKEsmQ8de(3VIt+JL+3AV*m#Lc2rxwAzx8WjdX$+%0bc}>h)+tdQ&3F?Rfp|K zG((|>o_h-~fjaooASwNN)f^!3W9uoApku>%zeDKw7U0aG9qdZD0<@VWF{T zA32*H=rBU(2hkJ$T6rrzs3BNV0JIsH*C=Oz<3RWk56rRr(kx4qSlJVOMAuDx7a>16;a| z1VB(HF;%R`73R3+m0C=3F^J#?Q{_GY1x&qA>Focw3z5BcRYD9fArQYdq2nTXSANko zM|dQv0s!HM9{4~^;6?J9&*Ay}p5M6rV2mtAGmy1{sqoMhSdxK0@>k}j)r@2Kq5svg zNi%==n*-F)LLJpCVXYxXml4eLZn`c>J$?~DA|QRjmWyS;M!^*rVZOIQ;E)GVWoQV; z?t-$yrG@5O$P+=tyfGFC8~vZ!g`K?BG7fo<6cIUa9ndgh={}giw7So{o;(N}ClCah zkGjMI!Sr~0mooK>88p4UPbv-!2fnss3{~0Odjpy|CTkeyRTsn+z zJOSF)$f4z@zpUEIB5W2JI785r4uPHlO7t6^#<+w&7${`XSm^qQ;J>>MO1>!1tK=71 z?l(Xqw*{iA%70~yx2i0URU9tO6-MDYdywA161e&d^x|MjU>dZ-|2o%zDZQI`)7u~HDwiqNznOMw-T%Drgk$Jog|F&V%^I#0ZZe^Ch$!CPU4-W|}x*@fy z!aM=oxCcjyK^Y-!=q`sxu(UMT*K_agSfH^4uHK;#1OfpOrGng!5CUkY=hb^|w*x!F zR!%rODG}6X;H1E$VITtzNp?pk@f=V;sAw>tqMR)^pK77(bVfdtFs344h{WuM)CFjS zsJtrsv}DUUdZ}NGl+f|Qz-(r zm#li>Jv1%AP1=0^a@l8d5W{tf=GYC*?+!_TGY9hyLGD}N(?%cxxR7f&FPOO4Fu|IQ z^YdfM*b`-9i<*IVuSM7cvlmp6mT%Rbg{&3OG0h&OU})xBxfv1w9bG<(3dy&KB8toR*sH^qHE z$qiyr-g?&$Rn0$_+foB$i|5yEm4iN=vJGQ_*pH&Ni4D2rZH_HMYXP%R{T$Nq7sK{{ z+yJv!KMoprDolUqYPT>~0|fq|>aDQhL`~>Uo7(Y$Zb8?78(j#D#6tkmYWm~R@+g$&w>`<@M1XW^8uI?>?GwIQNmkq=|rrD@*blQf8g z$nM;N*nK>j+2yS&((DtG4MOe-h9HC;b4o=G6KE4&-)KUKi|_YgmxAsC7`~U~NwZv{ zuW4#?doN&X$c`vK&J_$OL)K%#nMV!D+Gt{z1WPu85_QW4NOcjjqOOUmh-fNf)ETv7 zutAX&X?@QUlfK>GBuxcITY&)zfdPdLAA0W?<>~_8_4V4P+9A3NNkWF1pzXX$BpPJN zSw&y?;I3c4B%<=*xdn2QcZOZ)`&~yPZ zzQYhn94K~xFdk4ns24B|6klixflcFtVB>FV8&oYsD8C~@389Tmx$;P(KA0j4Smn;4 zP>n#)19wS>I2$3!5pjm1!?01{_aWm;KaU;dKL7}rq~2>2K~m`L#_N)j_vFA3J(z+u zB6v-to0EFw-Y|2LR{w z0F&W8`40VQ6{p$&-Xuyh2$ydICMj?UdA(8A9k^q_K9NGQO&ItN+9hyZfb(J`0d%vY z=iR`Q9FQ?VCkHVtnE%+(5e|A;GJC&_I0xrYb!qx<9ZU#aH`l9lmY>u z<2a<^q|%_}rP$B78LuVz2MU&8!ev|1`IL-3nu4#lA`4(tYKV$cpWbR)v#vm`;A zaVuEQ0%C^|-`i+`xrb1>(*`fy^MoN*VUdBcZ-^d2X@GPredweeh8$H!Y<6_;dVup< z)S>~7P&tO(c?*i%c@TF@bF1!dfks!X#`Q)7O?0g9pqn0BMFr68swCYDOMn1 z1GL41snEQA+Q2*zUMR{k0Fz_YLgz>JpC!a1fa>3CL`uR)SXn|gxZCWC$@dRrbN=}h zNmmhJR@v*0y4k`!!B7B%RZ1YfAodYifKcBHPeqVd4i5#OhcFN(vO-Ab8)ofmSK%q{ zLzvxWT5bXc3d@{14iOIcyHT|3v3L|RCcvR}1XD=8XE;f%@Pq`&B&o8Ekkxt6<(TLk z5l8GufUjNalVJl}4r(f_9T*1Jg5oH#wJ1a66pnKRM2neLP|~}kUtV(pqX350K}dkG zVl>E2{Jnx0U2{mv-Q1;^B&QGRBrw8M4Y4cQPJk_9SKL%V*2l0DZUbTgA# zPLS3N?HT~2D8gqxfrtuNU$;%7x3Fw`aTeKvKXk%!)v5fg?68tV5XNFD@%T-7y?z=| z@UD>5k|J-xc(5D_i6Nn%Xd!{N+Z9Y>V8i>aj6G@Op}sz21!MtJ?R>Y{+yzsTOW1eh z$CbwQM?p-fL*^V>q*O@K!D!|)nyr^dKpWCe*c=q-oaS@KTnz$%NIXDfV`mvg*xtIV2G3Bb6a;6@aElgtfVdxa4;IaF`2PP649v5YSwx7-*AM&Yf)A3j7p1 zb#*%YVB0*=PefyGD2%r5B_z18MguMS5fX)Rp7yvFt^>uoC0pX-p8q^bmNfuMjs;d} z5dLxzZ{r1JQX6Gw!WCX(7$1aSE#6QVDA2gm0sbGo*MWJ}Ncvex)JEDVW&|KK0Zj*6 zQt6H644e2mNWcKSV5vArPy%5t2s0G8z2f7YA(9Rp>&iqHus=OgI)p+VcDzw z#%9Q@fkF-$1ys7&r1~B6+?2;_`M!;nM}lw#gcx!+LKX&5-u{w&tys)n06R^6?5%5} zGimbQ+VwncHfNEigKWsPhA2`P3;Z29G9kZOARmje9$1ts8>Edi1YE(B1mFPYIxa6~ 
z6AVYzS|V3+l~H10wj@3yrWKcf3Cf7Uc8q|*khc)36IBKgMNB(gO(-JTHk`%Rz&z(= z5REihLm2a{6qfcaii$M#5E5WiFoze7QHaB#;4KKoFK3VfD#8K-(M4TxK^%ztRp z6_bgOQ3s(FBc-aXfZQVtj{%+ojEJChq_dId14~RPfqY@IW9p?pFQb}iFs?fbTpE+Z z&_j@Zg-|Ex53WN-Zp}#4Mim37t{IgK#NyuG#?nYtWmrg48^c>jRXl>yJjgNj=6eP& z$g(W98NyM!*MQ_b#JWks6=%_3Nkut{42TDV?FNPk7LzEZNEXL^K2995b%C`7F&R>P z`bJ*&9qz-IT42H!Kt_ODQIK>p`_=S&vycph;tz{@vTi@1sGjyU1J4aj;jr|@%uvh1=44bYkat^=N3l# zdhY^9vJ}eRn!6b!3zYhVkqSs2kRA(Ei1}OjZTo;w~ zes=ei0nOK{KE#akPwKsnFf@es`kMI&$sYpqB^rgzA?j6zbs+7pS$sxP=OH5jtOBK{ z9vwRiswfOD#H;Kj2JciHh~Nc~&MG7WwowR?M+|Kvw+0=2vqbP|OB1hN zc!qeC@3seB_Yn{K^dQv34Kpq>U^Zgm$yMPP0&9rxefw4A`Ot`^qyEQqAOsJj0I7hO zD~Dn`zhkudNBJP4D2EbR01+b;!PCEEnixd-Ib#G7QW+q7Mxlriv}No0aU#zqM-c^^ zK(^DFfv^IQy`R6x>mdrhn&QTqV+)!BrvYZ7)nn!U-QC)+??lRoDahTnAQoJLb_Pag zW8CM1IJYPe3l#~VN<-ZtJs%4SZ=DzucLDDJ9u3KQG+Mw3hivEIb^VR7Gz9rqYVC>; z!~zQpNa-yDRuw~)2uEk{=0QS>aXy%3T27#%Qhqi5)~1`Dp#F`$BIrnqwL$(~fae$YIIH^_>WI%aX;&5wJf4ll0! zFJ&!4diUBk!rKb%b-Db7Bg;J;_v84v@JNV|Nb6qs9n(ASr$Is$2A4UDj394-RV5Q27%9Be)Wh!||f?kcd-+%Av|N8wbYzW9#VJ0YBn38A3$ z^*AE1=ywf%+6O(hV?#U38d$PCiUDq(>)gG1q!j^|iEaQ}BNpa9>pqLNtjtX7PCkr| z1M5XT1f~Q+gb_uP;61w^qh=!vk^~H|jAl@slyMZ6Kj41noQCIQ#+tQan!?f7+qMI=oSK%Ywu>~!9!vI1i z$smuQ>mt}S4}D8E?hne)@qv{C-cEJ=&*4OZJVr{l`}0BG6J`>~$$;W}z;=eba8T4D zy3L}9BJ&4hHDE}AcbHTzjifnNI1D=AG3*$#!rR#3M*)-`{b3i3<0k)(rEWQa>{x&W zLRB9xW+QhJHJY3gzATfDBMNWM;f4}?sEASe5w;hihO$Atj#!ipbuAHyuR|3}b3bSyq8-LA%pR-WIhg1%cc>baccZ z42C~^Ei3+ynfI>HyisU5+BgYvib*5N05w#zkS64x zLFIx_Ay_wP{sIZ|X-lF7RG_AcDdd2_P{s&3_+T!9N(X%lvJ^ly5M1MNCaVpt06zXF!mg5p5pv-5_1Dkyb>0#SyT`znBL;k7lf*P&o1 z%HYbt-5qcNQ1lqY#~?27+1c~=vpNk#JCWvqwPvJpQms~I4^@iINi~yqK~__T6AEJZ zgzVgg*8?e;A3Iy?T;BAwDu?9;Q4dwFi>*#I4xTy`z*#s-%2W!Ae9|_!ARFRRbX5|U zwdan5#(o&M4We{F7Y9=eKg^Ct!1*Rnnf9B#93yvIDTh&}jaL?#N@1s_Rjw?kTFsNg zY39?!uui?0dO2{Rj$uFlX%F=u%^E2?iyP1B{1aDZW4x6gk0irFOqB0FRM4@oo7CP= zx>@4uVBc4PQ(?Ik2jV5@7d!)~F?&07=hrSVlKQ9RPucN^Fa!&SbK}hIcvdblT9Sqw z8QZ%AA9JWdAP4Ml2nRJw?GpbLwVQmR+fLY%k{TdBNnKa{LQ-yWv@fufV|%x~um&*B zbR@OEEvo0z^spChxi~%X`jtXn(oTDRsPLn1@%|b;MI?a}_)|3s#CiTx;hm@K`IUS* zcUNb;3>wa$^5CKqdHP?vLK-%Cn;A&l`WgHutA4KRA#Xg zOZ|wNG`0Qdyq->#$-HR)h$$r|Y?OX1c&5+9hF~uv(=W(2IWlLV7qWvF)PO0rnuZEJ zA+`&Qq!2Gdk~?0MEpc9%fahg<_Rk_-ob6PQ6z^Z3O+adm)i`qv+&quM_^?wnu?ePj zX>WS&Y2v|BUHZUZ%EqLJHN0$-!>1TGBtrJ_;^*5<0)8HpsgCAOVJ7?uz@|;)2PDh& zbGgnuUJUP@e&FztTq;Ek*Q^oh=o@khc$XdPm`M3N;}k$|C%&3-YRY>h1Qx>K1RN9@ zs5?4V!Y_p$S23IdzP8s3y%q{4)^@Y$IryvK6ns^V-xbuKvr>wH7X+sZRD*%6HTvS$ z9>!8F@)oRp0;n_fw5Ol!IOYnICuar8ev<(z#Kcp5-<;+*IK-dfkYM`xOek^z5b{1; z*;>DsBn($s^6HoDIDRgb#Dj(ioop$P^C%?w!5~MGD!4f$G7icGzAaw?m=9qbwNWJE#QF^EQ1igoc&+^-Rb<;hOsV2@PSTm0vDB zLoXS5?#kv7pmx#ZCVc0bX{k#V5!|0cREhZxdz14xo(~Lj({#RA#46MF35aV6gE&@h z5+k_BGD3bi9#@@yDY${@Ga;Eu6~~2tveOlQOqB=_46(`KpoGl3%&TPLoK&mnIn6YS z;C7p9s(l5g^^RS%pMTIylYSi3=Q7{e<}&8}MuoXJ-%MMPH;?!xZyE7DRXzxRQWIqR zvPLv_ci>##xw{1MNgj>^?W%MvbsOBkJ5mfW@GBsB0-^M6X|g^8BBCrN1m*dn(SXtzF)Xscw(u#nPzg5utwH%meK63JtyB9TSCLz+hlP_ zjC@ee;-Gv@2ze;~T{(v%WOksLb}Q37LX&PNa5$B^2AA4RSFwap_;CkC+8+hYKNdLj z+{RryHtvEnf7`>J(IvaZ>1OXu84&IZKk?;g0f%N#y~{k`;}PNN8w%Wk_vj6;X@uaz zLnl(H8!x0%IUN2hwr*e2-7jO~zRZ|XMjXw}KQ$!x%*oI2OJjxc9nTjy zN59~O-R315iNj(YLZ3gnRCd449(|qL{ZB=ca@9=!#J+|ra%7~QAQ;yMmmVQd1>hg$ z1Cizd_dbqew=4I2#5701=;+F~d|0BEb)I7!;z0-wp{v=TU?O)OsTiQW!CeHT^*|sE zq+Srr@H8I+{@ZeFS;8XR@dU*R>(teSiBp#>D~GJVIOt7^kKi53cgI|uw!}A|naksz zSl%~U=rH}#%Q7OD=&nlqXY$9K9C>@lSMjg;cC8k{TEZ*SA#!A|kc)Eb971-@mG=3) z_EBE`@Pn`tT^KUCJQp(TEN33R>B~6U5R(Qt2MQs;UjL>$MJv}2g~qM z=ChE~SzAg1EyG_ApPI|w(tp@8Twc*KJdu{P$trvfcUeJndizO<3IFPx=TqW%lHmNM zYYNq0ldY-gcJ!VSTCIFDeW-+1?`|GGs_Uh|SYW?T(j|xpIy_3uG!HMVxv0Qc;kTw< 
zDb3q5H!2sR$-go!ai1ind7tzuK(TbhxHL7;PoX+-^!35<=a%887_G-`2rm~eEDjei zcYO%_b?xtN&0-Q;aoC(yI1gM$6&f_(3fFb6UW#6lR2!U)Vs5Xs_?;jiwbsXTMRp&Npg>Ic=4BqfpMHN`#n+sb;{ghDdg`Ht ziv<^gwfSpif0WSlJ*T&qHF+~$mKY~O2g-@ygI}HtSS&Oohkr8G&n%pZ=sjohMN%ZV_0xn9 z=gaD8%TAhm*|~YIF9CwxQ=3+nA9p6L?C$R0BZ6=9TKRZE;0~2=H8H}t@h-IlSI9mW z6Cn_8piWFO?$ZE~1~_VQffVC%%W<%O)Y> z^))+n;;*Sr+Wm{_xZz#rZB+Q|7v7(%?C-imU3%E*UEei4m`yptRJ%)Ehm(@u=oOIB zw{UKrr$6|hIwP!qYF5yQe8FCn(~EkhZ^5;brsc`-RWSj%Y759!kYU?$0xpi-u>k1- zej&k|ZU@(n3nMK|pC7MdBi3^h{ng(SzVd7{!XD zh!I>~*bK>707i%?L1qZu-$qh003OhqQA81(92P(ZIV$A1Gz47XxS$*>^+NV1Piv1W++N zIGNWKx&KIrL6-s`A}|KBHF}R@+waDyz|9p%QeX-nmS}`<2;4A7#xN)maDDH;*Y5r+ z{wO$r&{@#+2uMuA#o=Ut|6o^vzyYulL=tC+v_q`izE~W2h5!2l;XOc>g$E;HlNA9v zfZm%e_q$UXyf>tbPzaaxWbBF_D52;+BcdeO{XdY-(ABW+hgjYWT>u2i327==zq%WESBcR1i~4r?#b#k1eb{ritYz~UhC zTfZR}87=p5){{X7M1K@AeWKtkvAz&2jN5Rf0P%}8@?n}Kq5sFeKw5D1ct8#xeHcj1 z*kKCOe0T$`Fl*gV8Vd5Ya5)491d5%oem1t%Y|KXj|BpYox>VQ}JDLUm|Nc}FY#3yK zS5})toUo6feYS<&?ii zC(pMqW0dGQfBRfzgmTDd`l9CW*E!<1)1BTEZ~PXn5;szF^X!TnCymLgkQel5nR?ye zt(+m#R(pShFsE0q&RY^!6Q)A0CK|2*&BA03NiqDEi* z%t$ZI*^{29B3ice{ORmOX3n2*mdHwG~YuLo6z0vId3G~#RxL8DD9k27TPbH#0n*Vrs2!tC|v zFIdNBVvgr+7RNS(U$?=6->4WDN#BZ z@U^rfLNb$!5@A}?o*ELQVP}>(>0R2N$H}a3@}fj6PIg3C(K&-jb(55cO;d@)v_kJt zrtWsnW>F@;61&6DznWEwVQQJ-?6~Bmz8O{b@p9T6ad6y~>Xvy%;7;FIpDR_vIWs(T zs+II{j}rUy*WR`Pn+k_hcEf)ksl~;SG)5hcdOjI+BaozPt2=;^pj` zd;L?M9|7*$&UZJrlDL*Qgz{e~_GN4kZ`tUuz_+jOd%3q)OLb_eH(l?Q;&BI}5;4EA zX2{MYy(7GMidTWLN%!$!m~qXFT>w|$m0$g6^6s0DlE4E`8707C*_BGlb2%tJ5^;_$%;_1#1KyAMto%Jnva0O-)DeE#*Lb47Yv=UOzie=mVxuG-uhD=dyGrW*;UV`p zM&vaDSEUgKFrp!$B`ARRRk9H2UxHg+E@}H8NF$Ubd3;19(GPi|6-cu4MQ7Xdc65OO?9jCYCo^RZaa= z=H{6&PidqRHRd((4R}3A_u1(uzLBkDvjE-6fk9$w?w6_Qz4hyC?!TcB82L)vsb#HX z(b9$3R1@Fr?{$^AyM0pw&xk8c zyU$hO?8w7xx^!~|MvmItE8Npb`MKrxRO95q6V=&Q{m4asR<83AuT@Z{<}(B{akOb~ zc6?K!7V+&c^;7JXdJ736Vcy!lVU6DCuMp=B#xH#(?hyS93VvpqcBUZs*Dy8STzNs) z43~O}e#LVos+IhTAS_lxX)lEzY%%y{l$;pQ5Y!QAK0pkREWO=PJg>tf?;4#9#Jdnw zwpWMF(S>VjttxAmJ0f>iuA6hrwwgBc^k>o?3tmgMviE-NpWnDVw6J&iI-TlS6rgHo z->Mx1Y;wq%7!Sp@DWCh4k6=LypWyJwAR}%&Zl0L+H9@8l zSdhKjiV-)rw!}($X&K>}NacXzDEV%a&zs(Rbm~8S`5N5Bx`iXd?8L%M2R}S9!7J6I zwN@=)7yp;~((I6-g(oDldHM5pzaG%36oMjUU~)mQrin*06E_leT{JZIk1sTqCQGKs*s^1QsM9;0N%AGk}uh?PIoIwCivQnmZ0vPa4E?Dl-_ zxe;o0A;nMZ^@u%1M`9&b+Lt1bi$6_C)>GCCu}o2>F-8lxOYgT#YA;pOsa93HxzA?1 zzZ>$K(Ox3%cZ)8bYWNYNq*YoJAQml{rNSM2+pmG$gWfMHNQsoF9wTZ|%LVuU7$R^o zYa16dUXOf#w?I?H*UBpYt2tR3OZKXwza0J=U46ZqR>}}`7&KxM@O=?WbeWZOE4sAj zjCi12J9~hd1NCa&g*@)(BNNpH+%#P!Lg_-%*OKZSE83;t`oAChb8-)sREKrbm1;;< zRn(>NoItuz0a=9E7BbBr+`^gLvByWees|Bv-Ep6axkB#Jp~8iAAuDSpO{ zN?b0SK4qt$aB4~b|3;}><)e;-K!6tI>xttFTO9_Xwi8VhSMrwViaD377~~1D0o=_K z_RRv_?}lG8T}$m=42qBLBF2?YsTtv=@Kesg07k7sNt7`mYfFpbSp-4Cq)^F1WvLB+UCuon*8O=_fI8aeG9o}d z>`D1tl~a6V=vj4Y;*_6w$jj4VYne?$KfQQrNcl`q8y!JHgS zpSo-M+Da;y{>pT{AZgtK_iVt0Z-*)u?+KDx=hs^;kiu!_#pj0EsS6A-CY(Ychmj>l zf}caxR?l_^`jG;o=&N*(;M#i*ueKJv>1NIyn%8w$91N#P2RGiM#&{?1_fZyauqLpT zj-Dard8t;eJkzX{>Pv5=KPQII$3Nbj=#(x<*9mUk?L{;^_ja_@%h#|nqm+K3qp(V< zFL_f6l>lF(;@`FV(2(1yF!?Dfn1eq5-a8dWvEKel>gj@lG4Xr(ZjwJEqtowHn4V*D zqtCxR{8ZuU+3DS+kaxqmqJE*d4m{#-*cW7&?GuFVL=1c5dTq~&Q13ZZXwG(=CB%;JwDHo=|FI;)SVj; zDd=b=@J&*!iLnmesSh7>2v)IkfAzs-zAkPsp7%fUl##X*JaGb)7FBOQ#MJ z(+j5)^r}?HY5Rg3EIj>Pqu>ZyWReQ)g++)rn`Y9flqv~I^>RA(-o@!jVz&N7aOQ6P z7-jhJ{e$I8IbvRizr_M9sA0U_O>P$tcAekTX*?E>n;eW~T;E?@%3<~Or(NUU;sn>R zeeXL?J$|LiNa>tABP>_NJ~u3KlCaBbB(R%VLjYxTBxNqPwU{~k7wyc)`iCUV1#M~N z7YcS?uk@wL!nR)M9bRgCJ^$B{q5XD?DGy1rW~E#7J%5!yByrnM^tYS}7oM=pIaN__ zB}Aw{L6xH8Zt7@;{Ck3W-oEwJPNc!^Nt^W){?ZgOEgW3GQ2JT5FJY8+{!1;F12sx- 
z?@Ir3ju4|f<0mbx4`_v#=jGOUGJc(o*nD;5Ag*AP&RrcpH*Gvo7SftDJknq3TB)f# z$cGyWPNoGESiz2Lcsv-Q9mtFt{bd4klCpmR$0W-(n`BX`xI)e^mZ?@16j|=5Fg*Yp zsTA95Grh}vUB@|)+MBR&yj=-Iby*B;=GV_L{Cgy-bi)ppX|L|_8@ z&PpcltuOS`vnS;S2ue`RK-TCelAgQj*KAcvA_o|bxc2R#YvNCmbd21*?3wYR+zy72=&CL6=(6zg7p?9|R z$Nc(CoILNx$@s8!o&lF+`ofK;MQgfa!uUFZcJg|XnM5t#iN&LHLyb~YJT60!R-Kqj zk(Z|0J4A&w(s=D#U(?Tct{7@-y;xyhXX-K=ytG5|17o!kzfPW;YMI<080qV`wB4Sg zmBId8q)KR_*12g)6>m9UBuHk&D@Tln%8|_oyP5TK^ZV|`rcyiX+E35pTe%B3N{8F} z%_C0x?F*rd5~lR!$UiCx=T-Z1GpE>;2xBhu-H{d%6xzBDC5s3lv6|ZVWO=ggyz!UX zNCHV(j{Iq%t2nmanXpGa1UA#Q%e1cN6jOaD%t=Kcny6BZ>H_k0kKtYkilLLKE+slDs=<#`-xOX z;w9C-jKHJ;qYzDD&*uDSx`}Uf%$CA|J*5R4FBvyJEib!Na#IE@f22~Us^=x-$a00$ zPmDZ=nhsHo_IQ!Pq0H(5&QwY3Y!1U3^Cht>Qx@?P`x&xVA3Th{5FpL0HJ;be z8*aI374eexhj`=ba*A>`(<+E=^MxTE?W$TA$Bv(0u8lU{JN&YRA>V655t8wf=pKwzScoB}qd*xY$$2sCqrrSUcaY5*ppl znkwgPy*kMhDT89r%9IMXTz#>ZU%Y8BK%hUrV9V>_`_BuuSQoh!llXRd@5y8?9M7V% z5eJluNzWH#rRmnxwQ7Cwf_u!vHEBQVj-Ts4RoKS}wx(X2DFao~>;9=ufe)lxLJzcAG5FnxiJtWSx zI^*@w8XTR?n))y5QKmHwiCV+^c7wLsOKl|<*G%@_DUMn$RuHxt5hS^#R2Q7ZjX0a&xar=(k$_xr!H@G#Qd6GDB zBxeg-@XFQmglB(N>Ruf^|2Pl?M%}w!d0SHJDkYrLOT4EnrkNnkF21%1e?2%Jc%O8s zvNf`p6iSc^X?<2bBJEZiIseYRJ<3{XDNk%_y;5*z_I!$3tyql|_f_U!c@7nHuLo0a zbZI|Jwo()72*S%N$LIOoYvcv2Ze2dtUmf{lY`LjdCud9ILN-oMvyo3b*sGL5F8RSc zD;Khjo^mdr(?q?-D>pD$4dv)x8a9&7c$SQGrF$crJftJI zpZ;eJsl7C|Ama2ZS~_$3Nv{1hk0v8!cFOWt6|EuCLA>Ty3Y|)Kz)95fyxcl5)yPi$ z^1MV_Adv>QxKw>p*%rs7Jw{LRzzW{Y1Q%9G2k()4Kexa}ntyd370 zxnI6&D&4lQM=Hc*h{?B5EX8fm!I)ZPdNcT`%~E;?hHR5xOHU^>NeEQ!o)$FtBDk<; z#qHu!E^WffBF2u`a5^|JbbG)4xMuX{t%0Y;%ib+)t+|ZvqqPc>{OW>hdX%>Y+Kvat zEVGxrmYEf-Tgm7tonojBHB8cp%o`^2W1E7+zg87Q;(yqVsGs*>L zu2p|aiM?HaT`)i+x_{9r>($SCT10j%)#&+4p^oL=`mKZg^P=&e95}2i1j)l!D}`2W zKA(2{63`e+jkx+kXeFOs7E2Xt2J5O|;d=Alg*VN6Z_gD@Rb60wwz7)&H_p`W+n~ID zT2T?`Vd>d;M*rgv|!5l-OS{HdT3Z4_5UBWDqK+P7$8+Gqy7W zmQIH;_ng^mwfru8?|E5|m3%KofA|pwIsEPLmb;9Wd4B30A4}m;*@lI66X)}kT^qCt z$$_|&uk1@+&zM|ROVm#!MyHoimNgkA!xmn799~Zv`h7YIdm~@#h$M&y$h8rFxHrQE(Z7`RzUj5q~6;_v%*)Ytomu~kIok*P61kIuUrC5j#u2&T)EuZq5Z{y~ns#0*jmdvuQ^?=HToU!Ex?;&=v2MN7G9e>DE+ zdPHb{`QFFOzuIOZu_Z{J;6;5lI<&ZN(|~g9l8j3Kw_lViGY*Z7ACqZ&JilrLWQ#JJ zAD&qf>c+SGUp?h+^jLwW=yq4%x6I`U6m?%q;lU<3WRE94&OpWHXlYdCC?H6xntT5-!j4E=jU$d>T zIObfP)Y)wRgQh%}3@+lH@NNh-<_|mY=EJcj!L7U3)fyRW-~ZF;p45k%MYNq9>rEB7 zZ^rx+GeA-PN-F#uP9V1h`pv$$BYG=5MdKCWYM^_v@xlAH!flRer_%q43phRe^J!eE zOW&O5u38?ijV?Cig<@5QhOXyrA&Rog-`+N8EN@*_2})Q=!0QLb^DQ3yc>3wL)ENu) z_bR0cIn#qJlPmGNvdvpu4$FDG_=_1cdC??NJjiowQbFh0{^ow$ZBt=NW)!F9Y_D;ix_!4)dLGc?FRk&f>F#U=|DMUlzx^|T6h^ojv4Q?LE{x-ts*S#n8kh61 zR$J4<($mPpR?=#C~caO@qQ!0*l_WLy?j*NhBf~m zS>G63$@{b&+u0Z=_yrr=+-Nu6aAVuH?QD!qPHfw@ZS%y|iQfFbzE4ff)IC-Ahv}-W znd$Be0T8~QvtI`cx|pM=Erl=?MAp|fA|iD#1p3@Ah7TQLA9MMuVbC4($Vi`{7n1#k2LK880Rh!h1_6ox z9ahxL#Mb!@0!Bq25$cE7*FtRt@f`wsWj-t$;{OKW3IzdC@O2`61z~Jp?P_85zc9rA z7e=?zHN?PBxti-}8Bl(8*pmkDdY>`g>x--NZ3x)f`pZ{ShSFj}CO2itnQLOBqkxg~ z4Xdf^&y(a2bCaCMFeaB%7E?S<=btGDu>G=0wCV>T3n-P*-y_o}n5fB3#FG+Us-~yW z{t0gZreFUZ6KD1xVf=Tcdd4AJr-y;8wA)SfB@kI^}jC zcrhu^kzWcC$$~o9=ro>kE$yehI~WhMou;_|$re^YmNg zoOqf7R#J7Pzx%j+Y%E@j1rh2!im1*XfIGX>zy1>6kF3jAYa!A8q1PSyFrG>uF?TbC zM-ncZ#7BzfF43frMInXX?5zcL2|2vwxPhFa+I2!2lEw3eGAyE2TgW^(t%?^C9Cx9S z87k^m*Hq?CDE+)9s_lx4H@?!YHU2xZ0kAD7nx#A`zP>n)6-q`gzh*8Lkv>*jnEH#U z%0HV**|2--Uw$GSnDK@&C(z)RV=gin^%T#3cTSF|x~$t;vHTDJnkeMDIi-Ei=48)W z-lZ*gr!iF+h5*TSJJbD-!Q!8Lm?kU|@dR^Qwbhnx^u%HwIhNn*u(}=aQj?FT`;Ail z-B=x2t;oM+2p5O=oEg1k6n@6IOJC=DGj4dT-=P2BwB161fcV6KfI#_5TN4`tYc~T& z6DAfG7McGgZc{EIgn<$Auyka=>Mw>3r#QS@FI9CP?bv0a?&E~Pqd~33*(tJ{8QT+Us{)k@xX4@eiohnhtg7iIB4!Y>^?&s*S$&3r)#XZ} 
z=Q4NK@(=8@rQh^kQx|EIK<_&~*RKV4%x)P(9YIkglUL5zx0_P3G$XipudO`P6B_OHQZjS{;dBgzw-RZT(XURY+|s}&h4_VqF6)t&dpg=thr1BT+xkn zJEdxGI&$j8!FF#-Um3!M+R5(iN1P2;&#OAaz_qp-<9%`_pNR44bSkpCo&-c5ih1_` zwOtlo^XFXgZ_3ms0;diIPtFgEZ7zVvcR4AyUNH{ldL_AO+=Qh+@|h&Yq$NIL#Ovs#>YvSkXx0zP3=69sQ9`%iHkay zsXhsnmD{%E?&@WCIbT1TjU^l<-(g@fq}a&OlQJXgZ%P7{tv9_k%&p&{weoK0M@Y8d zc_FNKy6ZKZ6&i9j?QaUfYX7395E`s`wtc#C z=%U|%ooK`<5?Y0{=)CUjG6zN_ZkaFV<)id4-4M3?aozDZ=X%I*J4Q^u0!iCv05=Pq z+QAlE5Y#u2{#t;F5hB^dWv#j$0bOJB^4=*PAe2ZBoGJvLNXrhT6>Zhq7q1z7NNM9u zTHvmQ($3>TZ&gfBI0xQC{lxS(cHBzEHtp&lS~T4IE8XoA-RQM~i#0hObQNOFw`1PJ z!YZx$oX2JsP^!mDnRuRg$9v0~gdaBQt;}d@Erux*`<(=LOKz_ktD5SN#Z8`C%$nmYalz3)bw>u>DYFC<1-HP2)n&_V|MjlmCVTJ{8HF z$IC8Vkxd=$PXfD+AMpj(FSQ$QJ^QvcgJi9o1j=5;`f`8Wn(L`Nze=1^Ip+5Z64LhvA!S+zlE94Fci(H-j=>x$V~h>FB;RFPp!tolpNDcs$NAE{eN9YCb0d}W z_JAMP-iPmB9J%CXNtyYOMBrSW>9@}?$R)dD{dD;{l6LDL$a`|lPqJxS*G}!W==E4` zWC&kU!FLo=5lM3X=D&1B?mS%d`T%7_`<01%{fhpNI~5!sElQF3f$gz?v667=2@>t3 z1@am){dYg&D|6wFmpXv4QEW#x<@@+0g4$hN0r%Mw>7RAJ$8~#eJy@} zgOW#S+)EVC32mdQwN)loe)tol@+{(#xyh>eytqsOTi9zp+4^uT zu*y0x_;F(79)E*|R~s;Pz*snG11a3+33zwwWQeSOh)1;Nx}(@vVXb8UL8sd!W%3Aoq44`(l~lzO0b^Y7|( z%0Ej2AEc(j7bn;~jp|O&&U`^afg#-V)#E@<^PdTUrz^P>q{((1Q6>aJ0}tq@ym*iq z{cs?YGRV1_Kzilr1N9f2Ud?mAQ^x2ZN3TQ+OmbUAG`#v#xowf`VDn-sex8II0uMy+ z=56X)_IT-VEama%8bZ4j=@Dyx9xhJ0!82S#S)C@NsjMm@M9vh?1UHp$5l1Z-ay8C= zP7O~*5_nF3e^B}H{J*4YWmO z>gC^D#kYemjN0K$P6R_~9g$~GMm||=Bs~5S51sIe3}R3eXWHx^X;4~fcM{mMtTBpy zGbgi@@0{8M)yKR=P|o9d6o0ZPTNlssyhfY z_+k%V-ogF+R?*^J?*H2*9RyQ0NryWbnHCQhTb*H2_nq$=!k&!s!@tj^Oo+>VSb1a* z$-yfLjIM7wEF?QA>t|r%8StEx;i4fd!@y(P$BmIjEreH zN*r^3`kClYBsth?f$pD2V$_%IJInexyg`ss&?}XRYPqbK^bWU8PV%Lo9rwsqOWHSheRW1)w>3yH^Z#Moy!4x~i_O+zk<^*M;RA7Bv)JWRQAR_)^u)| zB4T#+EVuAJnzJxIwVvLtonN?E>$4O*{quoThGUX@brb>{Ig< z-#xrF_;lsDm(+%e0etps+Tq>e4CFaEsLq?x)xk_5(Uf1Fyns_MG7H|vW$ zYR!TxY+*4`YAqPRmomy3dxNTW%c%DSxMA!t8^49ml$0@auaW#`dzKBdJ~V^NHZTjeWLw z*6WwPoE;eocGM1lCb;KyBm0+!+3qzwM~*c*x87SYen zh&5d|OnAIsit_144WOOIs@(4be1TW4^_X-}0cvs*;4NB}JH6X}!$JX%*Y4B~c%m|K z{Q1V*C+#w2mygdm5Js(@7A-LRcr5p}{nnjM-W_{Obx2xiLpUtOI9E%Hms@Pk==Gp( z!}%nEps-cv(!{3sU8(q$*WXG7$tak88hy^VV_544_288jINJ60 z{9oON+!v3%U!N^NIbZ|eos<2A7v1x>FKUgb#cB~<}^aXUst z*KMRCD@{-wT6^(L{L|h0(SpHW^{v>}(9Q~bYLTRJ(%EXMV z%++4Wkcv{hBc5KkOblDZh}yKircA_m`CZkJbGH%{7=GW39k<1g?eR;_Oc^P57md1} zYoJV24)EU0*?wH)<>EzS3#cU&u1vU3^4`3pB*PUgo~5;fa?O?wfY&@`H$U+OW_;_8 z*w-XZDI_g$`{Vwb_gn;jI60a?ElT9K1~9WW{U*mTbKaL9MLZak?GZ#q`Tm}AfvgjU zD_46R&!=3na5fRlU0(h+t?qEa>eFTgR5&?o0Cz3BRi4xBa@8Sn>p%vz%$6w=TM4;a zTe`EgIOci(K5s-_omU~Asp#EmStG;KS1q~32(?jGf$bPbVRsCIw*q@^UoaoMwDo7~ z{vPV_m$zuz9*DTz&Fv-kJ>vC?PsgcirjxKDA>y`ArGEUfW2W~nQlE(kLs&59;ccF| z=g)RrIipgA`Ik6EAESKaxFj&2@x3Cph4bTaAg#2jjpXE-1g?#0Rr*UA9DTGVK>Pj^ z%CS-*L^TpCpZoruRMsd=W>dDl={TrOEUJvsDj|km91{HAx8nmPcXuKf6I*jHu*Rri%sw&AERot&NdgOHtM6TFSikXi$gASD zGnBDN1dFEitK^ml&i(Yx#o#EV!Zo#kmfoSNzrL&4J@cg{r>T!d1mCyT-zYsc#CBD#Y+#t(x6YcUW}V!N2nnRzZ-h`kD&1V-mq}j+OV> z*C#tWKk}(KQlDrEW#ZVib@!#Yw$9zYW|__(OD9%xe8JVG?D+Bnkjk7N6#*g@QrZ+5dDM!8KIB>e;SLc5Bq1z(vnx#wD zP$OUn*`IFJ9Zd9h6Sq@!-$1!8Lk2MA{`g97GN>->IX;O#+~w> zqI*&wbR!ahSAxQ$!&L|MKaVZ>)`j=86>r(AYA>R`S0@7PxG#^_e7wb$zUPzWKn2G& zNdP`v{$xQVzy0HNLdVD`UrEpYQ6{z5%3}n1`+c8Ab?kk)E4X{JAoVOupEBWh-tXxN zJ*Y&#+t%{bK0wTX-D>uCs|Mu;Bms$v z?xzovoZ=YZ^up!|5}IoOyY7E}s`R_rK7aYfM+DW2cMD;o?#V`H^Hpo}A4j=&B~zax zwZPP^(~XPw*miusT55Zn+Iub<1P{ODG#Z8_D(Ag)Y`~uq*qhjn(bS6TOag)OvCr7Q ze;VwW_NLvX41PiqNc~g(EGy@6#lt(|2^XoA++7HXg05$$+n3I$ZywnoM1A#WDuh38 zsy{yv)8RWWRzzt9=Kq%MxR?6kms1G!f407`>&|z3<&bxlpE5bFldvZG&5N{7#dX4q zV>F|ww=A_M;W3(EliC)^$RlO*|Bh2eJTVc}i@~G?J0_92c^Bo?-c}}f`3}B%^L2d3Oj>PO 
z3^nD)lfdH=&6Zs+p31g`+myvK$R6m?0!m-?TW+hCW{NjNx#OK36_R3ZpC zeXB*%Um$L|Kz-#96@HJPUeg(9`MqoCRB1)+?`PCES}#Mmkl=a!a_*Png4?!Ecq2hp7y7Jz4Ww`*(j z&3&-*t8gwZPcz?RtxA@hP}15h#w2MK%2URv1^Tg!9RhEslyb8@W?{LF8fnLXXb2s zHFK1DL_$ylj6VqO@~yx3eXY<&DaTEuRGkKJQ+aaN$kqwHch|;LpHV?uR3ex>(BHLg zb8fuXCza!;DJQ)~3vl{=>H1UE+1nBNc%qMTVpgJcVk6T1GVQ?uskyyArPMc?{zTBY z)2{pXiqTwC)>zvEYLi$b2`CB2b-%bX@uxRjb8H*sEH)Xyv4NA_pH4_O3)KF()Fn>J za0RU}&u#BGE^fBUX1G62roNI$B!N{e1Mjv*_xqOV)GS_19gecn0$I zh8y1te}`CvE?gpwSTP9%iij>~tx$g^{lcpmVQjhX3orJ6U zN_uM-ZL->-ld*L%^)8xy5;%IPy|?PybZEiSD?0I%`<+_|CvH#omfM^ix%G3xYX!Bs z{}@0sZ^I8EV>u_@!LNp1)ZrbUM9}Y;^C6ZuN9xY64zYvO7NC_O9NRzjA!Y8-;?eaN zj(;^plR)E`lCU79{$vKhSmw7rlUoi+U$%1g3t{XNbqVc%+gQ&ByuP`y*TBg znBYST&8v)B@AKU+@jCItMUp~d$0R6L_ryb@Kf@$&z8{* zO3Sv*sP3RniiA%>>Eh)%6ScT&Zu)gUEB$8Th>Rom^E=RrBv_NEXdxcJ@ii zSY1*kp6#joB7al*n8k$ggCEqf4J!?B+j8j3Ltn>5Cv>bNrG3dkYEJ@;(4zx#KUN

    MU@Mk(C}E#MjvH!vz&ylGOgQZALUr&ETo_+{O|#mTw3ADY!39;Ezx$w|0brGCJ# zvY9)XV|DFjYCr8#BIthnWWYILLE@^pFGTw7kce6iMn9D)O&6m~O!h4ueE!j+YN(^_ zLp`;P$4CP66m{$?-cy+0LP-$h| zCq?5$s*5NOm4>X`kj%HxVexSa;o=gOJt4oItpFAr;AMI93LM_*hW8=fhOzm>Ela49nduaiq zFP+~XIGD`UY(ErTNqvC3G6*U^z5nhmQ1rO*yq5EE>agDJNZ7FnkQSC=t;aV5>-|2!gH$YcUJIC1e;VVA+h5lH<=wY@%Bw0Ugt<>-#!Yva zv~z5J;9E;6h5jUvmE=BtO4qHTLNk+jg8J51iyZ!Kf#aSDmtU?fb?f*|odn}4f+ho0=x96Oi)c)p1mn^^%Kxk=gkqmtCzO90D;00L(sGS*g6cJat5M`+ zm9MFlSL>E?r8ksAa2%+8o1T0l9gwiJAw=&dwPrK4z)RkTQ)};hY?$eFcP^#Qto%s? zMt46>d40HUIZr)*<<|ld%5MnQ9-aT&p|8DWU}IcIA=O(3gJ9&-{NFw&&z=1}=y6<) zG8)54;NkX_zn>QeEe^c98IFo;y7{NON25wvMZ|pG*sx0tfcBUcy9=?ykG_1FPXHa!U{O}9D2*5O+ zI!;XDFn8s#7MO=HCR<^Ob!i=0-P#>Q(6NKhYcrQUY8n0MbPy0=Y>LFxb8!4$j3Pc% zpGOMc@-HTcgZx24<|+df@?_Pj-75Ik*qN)cqVYusrkRxJPBjIRr`%DF%F#XV%y$X| z;gYvF_`u~ElBY;7NB35a55;~m$C3qh5El5l)Df~*i*^wI7qbRo*7A~NBLiX-_M73+&e9Uyu8ow?dT1796ruJ+sGxa}x6;73aKC)j_| z2H~l{UzQ})Y%BP1H~T-R*C!f=-e5t#@<{1ngW}0zv6L-r>%xn$ z(F!z>pf$rWi#>_4A#8|`%~)JyLou8-84DlM#nfE6jd0Kh8NUDh#Ubn(1$OBPv_^Ue zu51>Za}}A}+4^J2`YTxQQt{3IpyfA7sANBAd$5f$3yoGK+tvOP4DeiJ&O&Rc)}C*K z+hdpT&qCk2lCr3ef>-Oc=FLL?GD*G-)liUg&418mqjNE|wh{Hi0fAX4p3#<^#=^xf zNo)=a20XMBoW&Ws#DbVTR z9qHNfx7WDJ`J4YiS5y6Zw-K%tSCE~BIw1b*qB5Z;O06XJ zkY^LD;*3omQ#@)}@cczR)miGSai>h{jSFiA+xI6` zZ(lO(jgHow#kx$AWojBB{q*Pmpk4_ityl`&vVX7kEOeM^Wq&oa8A9TTCs&&<{;R$W| zEHr=`x$H(5aY_9@DBh=CziE$1bSH@&sfNbU9D1|ZrbQUtWrXi)F*iuEg-wgN!5o5X zUo|YQtvC3;lVgJ`TR43YDdbQx)O?d{G>gSYFgKhx#xEN%H}tVpW{#0nF;d`#rJq*L zQti|yp-&L!P!E$?s30n1qkmZV#e1?z87%mt#>8wE`v7BGjfH76vfhGfSYg&;K8v+L z*o~@er_@feY&Q$uU7%z!i*=?rCNx4rbHo3j+w@3I8I3SN#Nj{aadP2v`82{Z=dJ%i zdsJ}+WlE-dhDla(qd-0OujMTI+E2U^qgQeO!Cx(FXQ6Y8@NYWzFj$2Q$= zmI_;2doecgGX`hOjo=}i#yW_EKcr@3BYd-M#&MRs4WBf%|D{Cq;%VZrSjqAFC6ewE z3tF?(w#?GGr;;LFat3SSfBy&Fc7W8=0W3)OKkqyX?O03F?qWPRu_i- zH|XIcRH`2=l@Q%O3k}3&>K&rzNRho(#eyQ=QxDE!6(}tmUJWIuul@(EY$Q3|u7t%gZj0x`4D3QAi?vY^jS$=F#a{tD}Sa*xSYN_h@c z!_B9o<7cr`A|!O88fqE;{r^x|657au_h?NCvrz0*ZZ%~K7e|qjj{XGV?thQZVzCRj zHHNL?MvY223mUH2kvxlyr97X6eCRmMlQIhp$7Q8yrAs#zCjsXxubXlEmno4-kPNF&_gCz?NtW>a?Yd^KG1!{|RK{|l0KA{nm#C0IBM zO~IUc9882LFH0%~7C+Z7p2aHdCd(u?F3K5WT%LvIPzFIF1qO6w{RhROyN9ubH{2&V z-Kd7*?X6d4vH0w~``yFDcf7mlVQdu5q+Exne7O8!;*D9dGiNAWpAVhaW!{{Hnp_~E ze${Zm^K$tt^c1D}TbzOQU!hyGP=`~L6Wa*gCX@byN>VP*XBN!+apKM_6dyYCq_Od9 z5=>7`w(5&zB*)LmkZ4V;oTYm8hjKMj;Lys(|DZp1;X?8DkW70Wgzr=HZ!{UI`@F82 zCBx=p+j_Qe<66o?tA=`agznE`Z(wXHn~a6FRk2m(agb9eG9B!RwtqNFm9b|IhQ>5P zrBziCbAQXE%Uue^6swYlE@M zD5ZqOwt}K1eVwyd{$_GM9m$6e!-d|?Lh-=_@1@iUQSU^yh%yysoos~mj;7tSRL>nT z`lUVTvF=hZ#=i;2+~w_X+J*FkxKi`s;ip3%X34p(kpd7Yur6g~-z-#((xzc7xJxSF zKj*X%>n< zqwn5gtn!3f#S)|7OquT2S*+$gl2b%JbQK5~oQ2|mX7>}eqdz|n&>*5r|v;{tw!$O~+7>0-Z~Bhi0L8pJ2=clHr&;`^PM{pGEfV?rNx>UHTtX zhEgn?qu^2K=zmZ=IeqNe5N{v&M6roakvMZzN%#ZhrT4HP@1W-BEV&ZK4zo?XJ;{bn ze*|+pmWmS&?>R6-^)I~jw< ztvSXqJcIq1xYl@5PsY24lBz!btplA17-Vs9qx8EYeQauhd61(!kb3T(oH*vn-ReW(k_3pzF@B( zfp?!o{*cw(?corKjX0ow?f9G*+Z!^}fe(l^C%MvA!PKCZ18dYA4?P{q3Bs*8CQ6oJ zaBv9i9y_3@!D&onFKK8%Q|~t(!{oeV(m707`dIvKHrY;fE9fL~H*!wIas8Kyn%COF z0d4^_x~XDNlo;qjxf4s^`t=)vNhx{l!0|^uM+;A8xfb^v?>0qz!zqmNznIndg|7q> zQ?w2;Z^s_>_lRBjVcLOj4w7;hpRPUm+7&-K#7tXifuA^IrbSwaFQ9t($a{{4ox<__ z%aX3Gc$^eP$4_!0Nz(e@0*Egfq!SwAtZq{LOFbN>Vb7BlzC`s38L6bHgWaSGNw-=B z@$u{QlWs5<6-^(9k0Jipo>?P`pE6@+c6KaXPA%n2M>&kbJ@W)M*M+&hmWjGJAAnK; zvV&Ev;M)zC^5!q){iZGFBZ))kPgo&jeNo4+tuRlES>Okln5R7h$?0>t(*nQh!_1bs zPYl$eYO_DV3~~)t=X3y}ryrH&o;2=f9&XFWu@VQOI2-qx3{-Pn z-S8_t%-jbl_^naqIa>|!MebzDf*y|M0MAO^df~}2!uU6CGj;*b<)(;#UQNw6D>(H^ z_ud=p+M&><$sfyrJ2;If&acfA*QF?GtYEppRFze_w!keFvVlE#h)YI+nZw%ZU%l<_+c%KV(qm(uup|MdkeUDfsa~W`3^)@g3jf 
zD^O`Cx3LRp7DOBNehN3JOK_einewcMpIo&%wmz=dG@8{hu@=~Zp4XV0JJ~@Z)+5wj#@sCTnw~c;CDXhvyllV10I&+y@o#->i89jBFj0XJj zZrBB}_Pn~rjr4%8mS_;)e&fu$P55WWnb($uE(sMRdp5U+L!mKb#OE~g=c>ah+IVGu zqV(mCUJk+ju@UZvKMMTQ_U;q~Nd(q)*ZNn%Z73~O*1!)kF-y0)!DyV;xRKiax^yAA zystMR>y0O5nXoH<_hAGI%<4*INU&f z54rVR13%u&ybat)Lwei25I+jeyu)iDv0xk7qm2Z|swWI0QO8ZXZ+E^9eqnJC8@oIH zA^1fk=AD5;@qGrQfB`Wa1Kz?M2i5;~hkPI@Y+^VPob@?&^?W<2 zG_L>nJZB>qenn#J|HY^fGhC)EBqr)#j1MY9YbP-iNB_aBhK(HG@m5HsTL{_vm7SsF z%Dih^2=TT0yVFUecXcmO1L8{(_i9o|Ls@0xCVtA3>@chM9M_pjT)}cpKlZyvUvEQ> z4i1y5P(R`6T@4Dw(;t!3yT6Cy*wQSna-mCgcfaS)`vy1w4iT$sQ;1P~EWgGHsO?>|ml0u1-gFmmDN5VAtGm}#$^Aj-_V8^aL=05QbJ2a0hhDo=x zL6>|rp4ku;h+nQ}HcV)c-ya?;SrDDSa3(NWe7QBr^O`lx*}IePG_M|)P-)_UTmk^> zr#_MTA<^7JO1#>e__&S5;Ey*8>=o$dz`wZg8xPP^6<1PCKTT94)%Me>`=o2z82Osq zd3)E~jR}P||;HPP8DkOmn*^)WPXU3dEldULDa8KSs4M z!WJBO*dg%gx2l}H;6ft~NWM|beAT2uiq%3C;-|WpEhQGjAFS9~ezlYO^mUU4F^n71 z%CQ=T;dJeN{0Z@CH-P^dgcD%NAnV31vDLtri!yBDv;x6SsC- z3*Fd%_iKe?^>_%R0UWz@9tnj7IGuKbcvZe}6~#~LG2es{^T^<)E5VtxCS3#H@N)BKJ2hF8e@`ys>xwgA0)tI4tHJrXLu0 z%Szlh@VLo=MexNm0U|XX(K6IUvjIF4w6B(9p{DG>+w&#mC@>@`2zBCfX zq!Aco6cW?eBo5vgCTHQdA`AR{BJ zmRcxrs+XkL63ubJoGHs)%U$%Cy{u6R@qUM|mW?~869ae!j2{ajK4Y!F?tO_{~S=pQR}<0g?V`fR80C^GL2D-bC4im15I5FNC0qOICr-RZ#z=_WVH~nUc83-Atweu{v16<2BdFjr-ju zd9#Dr#pfOlLPKt3P3+x2e2u0i^VJzh-WI{Vdc0PSFWQNp+Qzx!k#%I5vK5@icy@^8 zp>;#F=G2Z`fqQ!tC|VQXe*?VE=3KZolh<`d!El8Q2Wq#1_JLs8y0`J342GA*F=! zxhK>0?gERt!!28e?4NpXEZ+F=p2_pP5&RE>=Lg8)Cj~qowuOED2G=5R=mZERyyT2j z-0n#h?*kXKFlejg97%}3kdw2`n(fG(1By1eDD@_tTjR+bm)BZq#J-Gt`T3MLsBKsz z=R0AwL!{Gzip z*1_cXMOO_unI>Ibec_r$zUv7&vV-RrsaQ*cbvAf0o7T4(TkMGq~8u>^`L6PTHI|V8ePo1845t+Yc5I-+P_PGJ+Oz zGm=YPa&Pp}MW-jOC=Krm(I}x+R0+r0bLZ!yV{SA}P08*m3n*^uD2uHwg%tbw^~~hqB{7&&vV&dg*Y)}R|N2OeyHRb+vWp_ znsnrWpXQ05;7lzqT_**ZikD_8Er(1e!xMzK&CgmVruq6DW$vfv86ZujZX>BJxq|DQ z`(xs+oRrX&TbA2?;fQaQALb+#L)y{Rt--yu;$%f%UY_|W{?YZa#zv++P$bDot;5RW z*r$-N_|x2S$sAIhUliA2)Ee2!UV=nsd*yvSp0-+gq_9GD#6p`mM0ktm39x_h!-J%3 zaTz4`n3EwM{y@sUJOYW|GBB0#mNFF!mPFq2hB;Yzmd$l227fNAO0*U(VdqB%;>7_> zZB&*V{(Hh(Gg7NAmq~B^y%^sT-uZX;7vjzfnnk_vWN_0abFDC#oopyL*J!9-&UwB2 zrvYX-Z(JesT$wnW$}3|zZvV#K&dP?{O}FL@+jkR7O=p1O9+5VA8-v4Q@gJm@)e6+{ zX{599>b1{xcl4VOs7)?_&_vv1J6L;Iz~Dy*N-dB1kTE>q z&e-la=6Yv%#2)8AMj)_Sy{G!!&0u!=)pM?EjOK6OF()bxd<=VVA*1i%9UJeB{FuUA z*hTrVNfXg142&&9YsIVsR0r{3CU(zo`#JAblUcFmHkgmF>)0<>vY#1DYhQ{Y;KE?u z!Lu0t*~U*WSBl!BNw1oN*;AKurZhF3G9L0_OUf0FM9T3KFpGgHGvNy{V)Bk-jX(w9 z9laCKL3nC69zskpSE!G*X z(d8Ey#g%th?mbO+VEo$Bv{O(Ew9CC!CEApcZ#vS0c|Wq)d-ZJ>XCLm+4?p}}hF}Vh zSSfPEUGm65XUR6}6)Lri=1HN8gV?xlOMSeS=d7=N(R?8QYv`%kq#^dfYDiDa%AFwZ z^E%_q@q{8LlCvH0$!F2BLEAHLwKPM6zQA5M?KM`z5T^C+DivQnCqn0$<}UqD@Ah5V z>&PI#SuP{O3>XF`m3LqqCd zEk*~k?8grNU9(nw&Ua{j1EEnps6}P7Isy@6!|1s63t{io_3)o?_`S1RiN4n}j`-gB z0Dq}?dK%@@?u>_LfBzyotwD3YbaObPWyQcA5pYTh3L_s-E@i8s$AO(>L{JFyQHn`x zN#WB%SWcedzv!OCz+&K&lTnC{O<<9n%;MQ5Qnq#`?$vA$q{CK#EHh`;HF1F#8FEJt zM(#I0Flpn^eu~KlI#2mG^IU5svVrH`1hSt54M{+gaT!6^@Pt}|UxX|!CNJhW<1vGY zJs)fLSP$eEbj2@b;};7}X2HJ}_=W4_Y~dG3JS>9$5AutSWFY8$s9Oed$2mg3quF9e z8W{#3j!Ws95ZtjUYuUZ9+5#RAAy&cp#ml8|SuVf$gbFT{>#Hn8%UUm>E_sm4bG97>6Y@(6Gl~QZ(RKV{RT8c(62Dk| z(a5r|FBybbPiFRG*`M*L`Hx*LHM!X0WgsslNUAz=5fPgmzebDLb@(+ah7v}#1`pGC zT=-fub`0F#&##|DTy6aNWYpWu+kp&RxhIp-txwq2^cT&Xp?~abTvkyfsb#ANbMkt> zu=|IHZuxj(L1ZCDzR}N<&T;=m4&cFla{!mWQCG%(7!8T3>1<&OB!0$=3#jca5OD`a z4_$gHE^m72r?UptIMY5%5t(iT{!2dcdIAw$qTHoy83>_KQR$*V3~A#Kjx$xw2p{rK zd|=0J{kCXg*%HPo+9`tw1kug8;xO8qIUMJIw>Z3{*AuP#T7~^@V#;XT;Z6BiAc)1J zF($ceeFsRq^&=lO*0Is@XHW9+N}{+f)eBXZFa7-eG-^R+8)j>FM1p8&aIJi(YDFwN z^Txy;RuE}D0_WW_KNrl>{MI2@Osv(#Z{qTb*aEBo^YMq$!3fAFILpDF!Y3rDh=`fU 
zOVJFRCD9%)4_F1;oHNL@+xA6Dn9%lqO5^kvGGHbltk;2}m7ZL``}I#cjvj<3Q$A$b zemEX18~hF{De8gC+7ZMGggfnAE=QU8#m<{}GJ)un{7%Kzx8cMtrHs_F>zJHv@`OZ- zIT1Q{n_Kj#l$uH6J(&5IvNm`Q=<|B}A^Z0bYSatU9?+74*NHzcQe_#} zgVY-t#i5(OHnhI@XYcK|6CNkwdOLR{NF~cn`E9r{e53i=Ont9^FdJoHhdA&~2?0I) zL0JM>%Lhvm#E%A~m%Tz*(~LVCR~rwT`lO3jRsZ(n=L39dWQJvbYQivy=HhDQk$oRH zjX3W#u8SEiGtZRz3MP>I7%^n|J}5OKNhY=8q5ehVuwxeoSuwH zkA#t*3Ghdq8sTbxKHXNu{&a)H-{x!+P^8!Rv0p&!%$Sr8KQQ1knpIX!VdTTC@nF_n zNZR~jfB&^J>&+)X;BU<#($rxdj9>Ns%@NJ>hC3X{9p!+Lq7+>5u|uewu=W!h=wP0B zcU6p}yP|h6+isRd?CsPW6@+d^~Byd?>tO;`g5*$&X2U{q6cIDB5R9F)A#{#%S|N^2L!v) znMZQCVuC;0HCNn1WyclkxYG9NTY19=QLfrz^*F44tOAN=mAK;*X{m!7+GPG|>-YYv z3Qu?p$_&P$#C=S0F51WnH4uuR|DR$6MYg)2t#M6Tk0+fp$yuRs7XHof68QVM7axg{ z;QY-D_C3hqnTxKmPYZD3f;3$mPkr)fLF||_3Gr?H^~&!A?g_PJHJRH$(Oisf;}OYv zgnC>49YjWCS}=P!cvymmlrWdet*C;13e)S#OLOr#{p{BwIr-G_JKmPW>5dNa4A%d5#pakviPoGm zO-`m=PO1vbg#1-wr9~$q(RFNS?s8OPc+$X0WA|KJVQTjVw_;O!T}HdUoZAxXoYXS0 zyY=yF(WSuhVg6h_lR$KnYxW7^P^y!h3TAVTNOq$Dn z_Gq)zs-Dgv+X^mo;noz#gST2H54Ik86?5&ov#F@$&YVAYFeUAQvUhs)aQ`<*!u$lQ z%cI5!@QCt{LsVoq9#`d}ow#}VN019!2?O<{8ArlUW42;6&(x8@vzuZL+^tqj!XP`;IE#s@`F9Ldp^c<;b1q?Am<8hZ&15~ zXk$olEou&NqON~S%cyU6-{(v=F)YlJ!7x<#+1aJcK#oeUv+e^OKIe}PPipz3| zURZKKPDI_3S}7UQm&T1K88_xt2UI~e)6xbbVwNq_lM-fEW~*dNK|U(mZw@|jzARrV z*4-wfp&!M`YSyK%X<@K8f;leia#FLzX-vGBYmMbmK0 z%Ri%0L>;XhBn7VyUpZ4r7Hs7;B{8$TG08AicBIxKPexK_a@_g;6=jc2+Y-9NVk!la z_cAd3^Ht_@l#9!(TF!1{%R&5nkB~Ew)2dySM0~G4lL0OYzQ$W>X-{2OMV?IKty!nW zb&5h+e&)7^;m4U&3SAh)Mjcb0x`)1pjZF4u!DS2DXw9_*($d-~SR-l&iH#B9KIChE zNyTYYj#xaG9n7peZ8awOa><5`I&j(vH2n`wc;gJliI01+oOBW=>T*yXTLmbS9>VP)iyMC_c%^jLSLF1Z@{}uW^eUuZ)sR2u0z$Y7mjMmSmPH(?R&f;*u`Po}* zH*6jB;MiCVO~&jQ)mzn(NBG-~qb7_tbtIh8WEOm-d7{*i_BhY`?qp&QidIAFbu_`> z%{OW!AVb`n#w4^$+(`?$#&NDO$(e&XOmsZFdgeM;yi>u>-Q+}qG5Dqx%HgZP&>QZ- zXjoWZxUf>=@TUH!(jRLw%mgY0EcReJDQzz7n7ClgIoF{vp9F?!NyW7^3&@#m24TNt zmW+%@OS)2w^Cr~}azkH!Kg`N2GS$@`VYukC4mmc!e=(I38`l?4DTE90U!vt?!F(x@ zgJYWiYOExB%GXtm#C+8nDm9)@T|6>3q%1bMc$=Mj-0Ek>dXVQDJ^;xTb8#Qlibf(X zV~qL5we#wdwPUPCPr-dKzzzMTgIwUJ+5C4;w34u^ABAWw(fNLQs)zvdGhazIf?vbr zL|O|P$s&Ca>K z-g8wyeIj28?TpEk7uwB{vkkK$an_vu)dGn(4T{PP>QhjlWxYOKAN77^YD!v#Nfa<4@Q5E%~U&Jel*x{EbIPc88_?^ii5)1aE^vdy~W9 zv=`dnRk0;aIMHEzJX=`?iD&VoR~_eMfcP%(yvp+KYf{T+EY2*MaP!F}bCZ|u``2&7 zGhM5S@)nkBNSJanzCOrM5#K!z_Th_=Hz&jR6BQ6PxHAWIkKTNDdeii6v)g@vb8JsA zrx1koc#eO&hKVIl(%COcoapqIIsCp*=yZZiWua4<6ivXS20S_o?mWTpx{7@Rqt)3? z#uiP3L{tQ5V?q~34!;~0x;Sx0mX34}_77%n{&vYzwKY}O{L#7Xoc0oAKb9Z`>c}9r zBm6sF=$33GE)6CW(NWy&sNlRKoFuuaxaMGgjTfElf#Sgx_N^YSM~p$MAlOalt|w(b zu7!jh@qP;3vyiU1iYO!_6OoAYq{voLOQVnI_=Dn!&{0azHiTxl6gHTP~IY?qUFo|#@LZ907{F}N=?jOj?? 
z3ADGWBgRZG2FpW&IbRG;2*JEoW#vdMy8WWLbCz44k7WA0tk>SP%eUPF=7JePc2}q& zBQ(b`)zDPwE*V6p3(`s{9M2Ml87NTz?#r-nED1v|E!Jd!1ymS5AVW<1K$TB-W~_T- z(eMp(Ew66tciNC4AsfQ*dO7%91j~^|5E3g`PRX_GMgxq>%emP*{EBDqT~v3}Q0JL7 zvSmI@AxPI~j&Yq+T<~$$72grrd#sss2I15hY8Cj&}c^kyCt( znlyxw22#sg^xFn0P)Om*lI#-I&2pPdD|g^;`}=P|IzDRd;+d0Bvg_)B>;R6<;%b-C5-&61TkJ1*`9OE;|5%G3VYT-l>ngwg)40IWH%KA16 z2E(CXYpx_S$~IAIJV8PVH*rHBzW;jF+xxRsoBfl_jksEfloM=UH-Pt2u*;XaEa1sB zt#0lzRc~o&byl0_t}_J_;LznyeJoq}aonPTn@iEm%!xN%JY5YDhUUps6^`V}0@F%u z<0fgmw@VBJweb{g&uCX_zY@lTs^GWq!kF5G0);^5O9a@^B#E_g$qN15Xs#MP_{e|1@ZXQA1Wg>jviUcDRFz8Z^h452Y@1 zFUGLOv_@)YJiL8y)~MHbF1)J36-=HN53XWia$AOYol4m2JeU~qK~2Mm(3nS>u|>eP z3^2tMtl`|prB0oK#nv_Wg?qjcT8Lk`*Rszi6QQ&L)xqo_hnaOVk9Dvg=oee} zP=|PRh3SrTYo8uZB5#H1*E2vm7QE-n;qT#scNSiz@F`{#8Hu{>2wy8`?y~gnjQaXH zHm)AZnBBW@JC2hXBdP41!Pb3+M6Mt5!cTt_r8t&E8Fj2`?lN@hx5k9}9kX2H%h!*<4ZZ*r zDM!IzE(ADcfQBLjG|1V{z}*9FBEVE51ZF0nFx<1*jwp|?>bZQb825&ZXP_6eKbLI@ zIk9`vy)V~yt-&*P8BlywS$?GZ;kqDp&$^Q>?G7Ba)2?b*)gGe4b!ec{|BrOUd4qOK zAqrXudLS3WDNoUh;$HjnUlw~@)Xoa;Q@Wp_!c?%il&p(j|H0s$5`yCsz*sAUn5ZD8 zMF_!4uNXvw#8!DR2G)LGyB|Gk(Yr|;eFBeH2qu|Vgme{pf0CB2QlJ@+xYh@kuS}1- zZ|yx8*K+P3da$E{%WCV{nyS58=WgDQ9V>C>ays{&u=dwx)4D}vGW4%Oo!{D1tAnrr zw+yEK0)Hv$gR5Dv9jz5Vt*1dDESE5;-M1k&v;TJRJCr($eV`wYNfw!7DQMy*>Ob1Z zMXta2tSe!JJZR8{J|h@Yy*L%^x9BvlE=poB0DIU0`Z4Rb}Kz_ueSjrIjgiLmsY ziv0jD!j8%<8viqH?*25tyj+C)1Ga2&UPJrMbVc9e# zMBxj|E~&syBt#5Xg0)YGNFt$|LPYs9@d@Q%dqdc6?_U2J)bMlqc9W75Yp}YB6;?r& zf7o)6H`OZVvQ{37-xo>_jE%pLX$bN=+e(n#QQQOlDZdjo|FZJP2>1xQ|2f>Z<;U2K z!GG72;c$rJPIo+kxCr+;-+_nr%)>n40ZS^r>56(^^tjvJhzqApmfnH01*-{C;({jbuR!b0SU8VX9CC1ZWcMX&kS5s%S4Bv{TP&

    G!fI1;1W8yOl410x6_zrrD!J3UUuo%w?eqDkdc}fSOf{2>d%I>3W>T;Y*1D?D z1^i;|UE~4*!ovE_Ti}?7us%&Crp+K7uvH+7u#5U-0~`MC^L6Mp1LiK_FdIe=7SpLo zsn(O3D|@QH@onHqho->GSfPVctipyha<(M}68B|bBNR3o$Sn=!q^rGL(Q&g)?F-x$ zyC#h8)HMMdtTec-+7-Py_;>pvucvE94wzTglc6g7#zeXC;LU`>Pl2?ywCmX7G$Ce@ zvZ4*Hig^re-uP8!wP6}d;?W3a;XDRRh%U z&avds;x8K*Erw8}DEHvZJRR`A6o^Ibfz+lUXlfJ&A9SiZkv}%RAyukN;}&kJjkylG5fWKX4%n zF-3yyBqFyZL|?INq4$evxT3c&WTvt1M;s1r$sOT=nQQ@_tJP6l(rX{nP7-s);1lC^ z!@VpZZ6z*OvcC)=Q5qpy*V0&WH`F&p)hBer!=n-V(h8<)5cj#TEmUfGmPXkpah&=1 z>wUbIoxC}$Pi3E1h->#Gg*3Iwo#|0}zfXlM=2qT>%wXxi`_Z=NO12U#G22I(lhNEB zA|-O+_Puhp5QNd~@BJV~L)amkW%YRlX~#N_TQF_ufW2GJ8q~hoRg(Ztxjm$oNH3<( z_u>KPxECk7tZ12+jJ1rnu=npMjex)jA;~j=EdxO{sm4!~QbTlk5c}FTerV3vr^63T zORBzwTb7F{b_S9t9%1KgH2cZO92$4Oj=TJs8&)4{`pIe3Q){@!YyKTl$gpTNq&pVV zQv1>m?STG^EMoPLQFqdfc!e6Gz+bQXWqNEmnEfxpViD#~8nS%ze@eR_Ry<}Su4gf| zM3lR#Apy*Fwt_Bx4L3E!TZ>!~CG2Y3!j^$-vOC6H9Ogp>A6Jx-GSuQ@oc8?bo!Pl? z#~w(EyPu=k;*e|@gD5=M`nR2}K3olSy6#)Pw4W}+fYiL#Qw}amVebJY+$|yOVniu78tes~Cm<-M zj3KAYgp`X3Y_%s&?4KlK%ky!A_U}N=;#qp~QKCSnTPj9}A3bz5X~R?K;*nlQH{xVhHAEjd!R2|v z;fqH5nl{oa85L?2=~^5Uo=#6rt0URCC9kNIbx9quMu@R5CP-&DHN=5Ij3YILh6b_! zMS^e}!)a*W@L)A$3QN_%BMbimC!uhpHb6XcjK+KfvHYdqGG<80^xq&M*<4*FTAp1Ju~dah)%;PW*Vg^?pfu z2|JSc#DS!p@t~1NTE}vhvZb)Zk6Wf&KB8`uUX@(I1%0QjZ=R0MAALN<4S$9A@F=IC(o0;{)Afrgp-L>xEOlS~(}(Hq)H5sOqCVZ*GilxpBd;3j4~W;%XE?Y_MmSThgfn!)S$nDF#4ePv zxggu%CO6Gd3u6oKI0S;BT(b{B+SHLiyoUNg-t>vY_s#hGjqox|l2#Lw3_3OgD7Ore#Kn z;Xj}KCq@o9h^PtjV$};GNc@wnviwA4p(+Pdcq(ny9lhC6dpFPO1vlQDqw z5_(KaxD+B4-9C`s;XUVl=_bFB-w}V0aJkkikOnsim$KyID|&pa`F|*;q1l&Rn6OSB znBsCba^^_59Hcjn;$WBJ=GI|1~B$C^%F~h(6 zku6S_`tP{X507#TRUb^em~&pU{p+9l-Mw+#bq$1)=i#rO?TDuX@RcipFNx5egQo)& znveiLQS2BSl=BrWE-V zbT4&5bNcBnn)K$q-QyMR5x+D;!MP}0E0jXiqmZj5ha*NpuA8seo?f4D&K2FuyB`|R za_h`G>2>E4Mwb~56nCX!AlV(Y%PTn9u$h1E(;PbENBiILFPM>V7*x1!pjde{g(aQ)R0+53=@F!-f)zQDVyk)=fO3ff`9KU9k>k|BJx(1%k&d9V1Jr7Zn#dwU)qnzjIWO?85xN z{tXEIoiYly9z3YGJf(1IOSlz%Pz;Nv*B3-`FJ<*hc6@jjk@3&cJHLJtZ?SOO0c{io zDg|h{A=#e1Wd*ID4`*-aQhH5Whxj+{R3#voK6k?tAiP7!)1hj7-Z~|M;m)ffQGdd{ z&MG*UFWieaVoO3Gaomr%j_&JA-3C$ZO)r|ee}h?ZvfcI|uY8v4g4?U0*DG{Tp#LHU z%tB^DGRfoVriKJCK^kQrq1Y|vq7yms6s5;715Hy`C>L+hO`La3;la!&hOqlZl!ZRf zlUq7bHKKzn`m^ZM%FBICuE_80{b9fiWY18Uvh)8&EE#D0gJ`9A_&6CK{}8q~S$Eao zxltt&w54SaCVECnYOT&&&={gNNU!Zr^R%vQUol|0hK%V|^;fjjZHOnPm!HMy7{L`I~S2eWwiye(m`g^Q(5zZO7XpcbYmG_@c$!oq2C;O3Y;}TgEe; zi6mGwNGeWxM9O3ps4lg3d-ig@zdOvdRT|?mK~X4*P!Zw2=yL*F2tJJBjmR19#Slsg zs43v(a5<6vUap!zdj0aI)Ux#lDYK-4yCien2S%Ewf!sE@4MS%M>Dw!HWDhuraNbt~ zNL0S?Dtikt_mny*EiL}_my~m5gR}R)_~No~Qr|0cjIsP{IF=v|3iP$Zul+w)MSDpj z{8~CQ^NDk#v|L5Jpk>|(aEXXe_E8E)*M-+TCRnjlhBtFXyN~}ql$~%wX4W+48#oL` z)4PqnE0DOzEbZ0t2WDqJ?$Ur@*vEyCd6&8PrY}{8D9L`mkb^@Ojo$-TKY=#%x~f>L zolnEDD!KFH1|F`yXf(sbY=3@B7ZN5Zlow5cFcYD|^aWc53#Hv zJd>DlQa0+554SQd33%0jEP%MmqZw6#TbSW#ZdEsD$8( z40jfu2zfaB|>eMYFZzom@L7;tilN=8e-g|x!4OUVof6eRNZ=XXy zxOWd$bZw-Ztp}D~`y`yS{DaJhOqCD*|5(P2A;X>TfXXHtTunIBOr@^;f=nco`)&TG<6z~?4MD@TO9!gq)9IbWG0~hA|=`k^O^nw!roO4 zd5rTMrwibfIrwp!d^zl(6iVIH6%w1G0s$)%~wbK@Thk!q2h&`8q$mr ztLFq^g4zEfu#P5mqNn84kusL@(XTQiw>-Kr+lHu7Btj$|MHVBqQF?<*y%0%|5Kav- zf-BULq1dxC*^f7HqE(PtWm3but2;C8^s8*=n0_0-T0H)1Hy(rOG?c)b<0RksE2rxB zTKL)+%c}MKnMXVEdTWhC=5TyT_;$^Lco)7kOU02iawEK)^Zj@_clU7F&I;#o3E(h^ z#T4JiPBQ-4NMl^fxhG2o+}L6{+2p|cPwwfkxq!Ix{b)K-8-KXT$#Va(o7~PP{CJ=e z%VO#IQkJ{-%j)*x+f%gHk3HNUtSWJsqRme#uBR@fFf2&+u(~s=EjD|zcfgxjxNvZa zwVhbX7J#eb=N|=@?dUBIrmkqF^xoYaJ@;i#N&5@yW0;g~e*@gqkR~v0;j4|9|A^iR^i2Hx_q}RWT24)k6 zd#pLlNIXsdQS&^Ki(AD#TQSM`m&MM3$C9+jsSAl_tP*18C7Sh8xKC1|)k_{cf<}vl zA#VBRtuI#Q46yuiyHvTM8)l|-Y0l@=kd9c7=&_3i2&p5Mc+rMYYvM2sHKcO~hH-@Y 
z3NpX|CDC3Xh2VaP_9;1pI7zfy(Mh;GO`>~5isIo}q=X$_qG!EDd`WEp-XQ=wK*hi8 z_enyzv(NeE_f8zqsz&7ekwmXjF8b1;)CplH`d=x3diC;%yrVmtBak+@+$HTCr72By zq*+N(+_A~;%58eg@;za`K>-6dlb*M}{r@9<=;4M(>Ieks=TU@>Rzo0q`zbxEXjaJ$ zOKvJ}ZLrhXa9cVI;%BbV{b%(5M!GPyQMi%%hB7j1^=C@qJdZ>_zE0dm3oEGL9IKM6 z*j+CUaq*JsPv+KOhD4XkKBqO z>5zEXco)5wvMiD_nEBUxo4PLYw6*bVaOZYQJtpZm)q%}lhrdY2UC4U9KebBaM{(m$ zmw)&Wx1+3+@ryn6cv?5n5%}`Ik=^`ddh{<`fo5#i{W$cyRWz5fz3B|Dz?z!Ozqq1q zAy?~iK3WcTd#WLBc!9m6ryBCr5oeD0 z=p>Re#7ZiWRLYSQKRXRN2$vU1Iz1wB))IrRQp;e@($=!Bhz^4z|i;!CW$5bc(TBZFm#kdFtx-G z^D_~i$E4yfY8CLW;1YN4{5;uo{-NobR#y`Raz}^6@QV~qUr9Rmi!!euUbyz&1x((V zht0N^Cy(gXmTfnF2*YTbD$t~3omvhrI2t~FRR0DD_%48ccYbXyzM{Lu-XQj@GQOpb zN!VbLm96i@k`NI|mjQCJD!YWKtTJg_xxE5;|9#^tD0AiM0e+pCy-1&0*Gz-i6Hi98 zP7a>^`o#{*<8XiIli@gX*FR(s%_!+QUus-TKT!`4X8(YN-*o=58qw>KY2j_ujw#p- zB>$MR#TAhFAOkn1#Avu&bSNjqCq{7tKmPf>X4c5mX<1{&ZKV6A#3*?%Tcrt!I|X7% zcRj|9)z;v+Eoc7xd3V{u>$>CbH)pcsESRKQCNZ~2y1i3@>r5j089|(fL|nd!EJb6D zE#Q@q7$+*=0xpSh<#|IhH+{x#N>}c_nX6`fYGeVPW^gZtr2Db+o$WDmw)5`Fy>^V# z>X)SZ1F$L`yq0<_L7fdJ@6coHDt)=O@fA8%`opgQx)bN@nPNj0XU~$pU|^T@Vr?MW zTGDHDjA07Y7P*qU^R2%9h=EK1rS8`4V_H;zAjaC7M)hJ2t5F5ie zNw_7ozFc5LB0qo49*EU~>*FN7FDcMa+#nM@G#I?Ul0Qb;utk=TSn3aVXG#7j+7FgE ziD|@nxV~D_XOIm9gh~1YEi*jig-eZC$!Y2yHS+9a{Txco0j#+%$zx_50~hW|%+mLx z^SGA2GV~gL<|66e?~~yh6Wo=mY%Y6Lo@v8fb^~NSjIjZOUP9E7WS~w!|JAsHEA}ny z;EAXEwIBG{4rtDnYcqB}2f5g9ulp^(d8!27CF8;eZbxxMwis-x=wGCR$0E&_JNQC2 zM8#O_1EK~?+}cOJTd;Sa zJEQDX6Hs4R#4>hAe7QrdGi*H*_w0vvu;~t)a=!oCaNN4|vuMAy?r4KOp=N-kY|vor z3VgYVcg#j#D%yS)-W|vE7DE&ucu6c)DZuwDv3L{%h7!qe?*KCH!*gs5M_55`v$-Mb zyN^fTf+j)UM2uN>P_R|-fh}kG8_vW+zulK?&ZpAY@#iR~i9jR(_Iw$f3Y1v6#=t2> ziB-vdkYgnymfL_jD;bd$gZAT|jC{BzC zKCjFp4XeT9U+v3vbsRZu{gU@&G%f@E&1DdpC>d?rmo0%QI(oK0It$OcF>sbvGRD0x zX~dWn%h<{tkZ1|-9MA!OAKQam&GS;<;u2X`DDC|x431q}LL{MaK9ku`CXhI74Uw~w zakVz+0q(TD8R|lYfBa|##92zlZ;b&5hQy(p4cO5n4xW9%1}1S>)fer+861w_3?Pn8 z$Wf4%I(Aq#=KGFz%s6DXbtTu4>$s?(Eb%_Hz#i7AwGWd#rg>3XFWEDAC$+-;4(ZAT zFSHE(+_mxmyaV>{c=J5jeI`Z(K**zH;wu|CttXLnjUnSGOA8?LS*~-z@b@QNu3J}5 zKhJr&ru~LfP)0gn1u=$8{=nZQMapG9Yqw(2&`lPAO~IJc;uyjKoPI5XODH6h!ZDB7 zk6$>aEh8V<#dtPb5e|va^dl|)fuLz>{$dH>|IFnwZIa_(n^xN&z1 zcR?mA7_Pm(+{p~PxR9Fe!SL<@Ec3E#PQKMGZuGnF;qY!Pja=W*mNcQP5;vE#{g)t8eoBmgU1foNLEj5P{~x0KAtzXNt) ziBGZ(3c@*k3NZr%S$n3N4f_MW!^|xu5OOD(6%;dI3yfi;lw0+pHuIz7{$9|~TON3s z>d)jleYd)>{V)5S@UAW#7v~KSu=Pm-4)ld{#*%=$ zG3-l_g91Iyp)8zpwhkJD@^E5~3~_MoS2EW*0Af8QbIggNwc$OO3>SSl^N0J3g-m8FnPh;u^)-A6Yv^V+xAt;U3185U2h1O^*uF#&U66T) z4CBHT=%4H4lgvgE%ndAm=>Dr7}Hwi8Z_fJ`p;zFZ&K!5E^A| z_yqWN+Da~Kr{|mNGXnkwqN%vl(C28VT{qa49yPOzz0=?C>D0Imz<^hHKm3)=jn=*L z{`aO2q^5C+m@fJtmokp6{`Fp`65fF*vDn_mt~+3@OHsnS$<+cgFPf~6iyULOO3J-G zeXiPNUm@8C4#KxJJIK9`&Z@3`eQ^P)3(B};V;BkTNLk7CTC=%k-fp1`u%Q@Rq89_# z4@s6J#jsC2a3buF6v7I_t}g>8gk-68-~LfBj#@Lh=|`*T?OI#l-Yn)B?0)MGa>Mv` zpO)LaZ-#d;$(L@nVVi&}FTG>UK88d%X9mV$N%#PpE=$7+5B=`T)jt2-*mG9jpO6p4 znD7-g2sXL!iwdNPM{gO-DwxR;vc6}j&+G@%v5`)C@)Rz#fF8OPf z4Y~!-OLje$tkq}!=VzTsy-IhhrT-65O9KQH0000800000000000N-x_0FRFV01N;b z0A+1qY10rW9!UzA8*7k73LJ-~GS-8Ji24I6sDI1DPXL)EG~oLy4P=Lz{DeM*wA0fY4>;sA$$dM?ssn$p_msPs zUT>~2L>QvHSBBv&!_G1ndUrC4Z-$s^GWXbjag_=TPa8&vInNNgPNtXLRNcRf@-J1{ zFP)5Xle+=`2lJAf;_^#N)qA z?~r1}IvHP<-0Ea{2e(~O;(5SqGItLzeA@PQ%>ZDCEFZImEC6b`-FmVOVbM9`cTT+- zvulQImRh(L>1G%;=;Q)tz%}>8-39?S_mQvadxt4AW}6JzhHN_>N`yw@LD;Q?V3X-J zKznb<_MU!p5Yw%&Ukx6}c@YTv)Dzbc0Mc#`BaX&ZLx7rS$nGM0%$e@UE&HIU$`ODQ z8uh^WK!rOgG8&;3oxK<{S9Ml7eYXt*%_A}28k*f=*aLU-58!t8#2NMj!NomsGNS?4 z-V>Lw9dIf}J#bT@1OJ%vf=4Qk0KqB!So>QOZ7C6+CvF?yA`E-r-r`v38uh>}g>icP z){oo;_{7;s`sHnE@8?_C7uvxe;ZJ}ut7?tyf~<$$R&+@81sQ)MwZM%1D3!kf6M 
zpAiFhj3FBpd}uX_P6W|Afcs_1c5)w>s>bldzJGu9VyqwMk#8r%L)mUVeF#1H8v_Gq zUD*>K4u6CCDl^tKV&;G2=fT{{o@E*9zUO=J?_>()gP@^T50tL|VI_xt_hQWL)Y<>d z;>`pz;eP?|-V>$WyX9$PduuifGL2FH0*)LO|I4g_-#G$3h>Ok+W;Te6Ccki zGJpE_X3P_M;`LzT|1t;szMlBwAZNP;;1B#C`S3T^67a`+;$QIOm?5@+FYJl`0sTBS z67VHG@rhUm)|mO*|KI;J^8x?6Cw?B_1GWLa>;LdW{|%!7&lvT<7t8060>sPo#J`39 zeAxkb1*8AhPurboK~KCV;L~6!eS0kPlNdvxzKk8LzF|77@GpjJ_W9K*paPh}jJGb8lQ6DC6KDM~|?br*>3HLM_Pj1>q=F=YQY zQPCQ3M|$Ea5Len0H`fun-V@h}%C@E_PIm&}e)PmuuY+#>U0g$Qp(kC=?Chhho-j!3 z3|VWn%4f`f)O2PA;4*vSx?s7QW%tBww*y>GPaMHvdfgKjI1_MnJ#mR4fRi)ofeVfS zoOVx~Z6~8b9CwjNe*r!qibN3#>XBI_QD&4B=U}r4mX56xoNNhC5-z!VKAj zbp2-XA7wwO5^zO5ah{{#3BUEk&0Gz*hMu^n4S;LyiED=qZKh|`19urZpgWbcsOwCF z#?t$-8Mi*SkpFb{BMhCMx1sUYw_xq+rZB5o&UY{o+^}|DB8;q+E!$}^pIL1)NnE(Q zu)j6(=ZG~X)hjcFM3%8(7o0rBi0^;xRU^A)xdf?oQeps5-$m>snL@c(8`jZ$oveSV z!J4`9XA=Undn2}ZEV-ZRB1c|q>{>cAI>Rx$7h>PH6N$}Mx!DI->VN+eV6Bdl3dI_q zwP+m^7h*8oOn%emu+RJZJMw2E^|wT0R}gLIgf+X0pQ(@-fbH9ofod>G;?$N# zskONzaFngmyUM6eL0m@no8A-~F`Cho$yM_EVsbzILjLf~Fh#@;JxziZe^v^;mF%}_ zk+ii0B|R3iLpEZe+eD*r0PW6YTlRCjSVd83&N$gy?Dlk^8ahhIBZQGr8iJ3si5u}B7bai-`;=n0WP^M_-Xr4*|&n$h;s(>^$PVaEC5pr^NexK8I0-rhuQ)lZ*M>Dgwr|OTrxNyR?;-h~D9HYGv+OT_b zryC5<^SW*sc&y3;%`?zZ zv_T26ayD#0`~Y#G$=*9#w>@8U>px4cgOpDhE6zM!x$4P#vo|ZCRnrKG)A?E|X=Vp` zk`imWI{bszRQ;}D(|1tp&9UTttox8QE0;Z8+aHNX-=%%oY0ZY07Y$kdCL|=l&t`rf zkYvXCo5gUF(mRNWdGJuD1!gU;E?it@ zw;kC$Tp2@pIg(O@6U5{K!jLGEgRoOVoB*1Sk~o<)Q+aDfGi^yv?6vRg7*}fddi~C| zbo^`EN!>MPhi>mQySD~8GskSEyf&vvFE!PXsj3YdyJAZ#al@KllWNSmK&jDrM0eVx zQAhS}%sSJ2EJPEi78Vj>k1)ueq#%4NCb(8CmBmhbxcMJw!yeiF*=26&yfYQ)1>^7Z zr(=9#6yyIrah$Ie%_P422e@`p&xsv=6} z=Mkk#bu$W51OIFlWZr4k0rKz-jwAr#WwDbdrvj$uks(W;c#XWTA{x5+>#*Cl6kBG? zE;MFluYGxVaZt(fuz${RE6188wu-Y?FPQW5NVxC8NRTwbR6-6R4DfequA_CmZQ1@h z9dkZkVZ1*yR!=;-k=C_vAre#jxk(O&{Vn(vZ^$pYf>z{_p(?p^!`79DPS6ePtp<{; z`isrxgwr~CBbmcVLG!jRos_D6N5{ZSnU12PH9JBOJg@wkefiAP$9k4nXPuZx5vGgH z;x^G86CZ6BpHS#agvT5kG*|5#k|g zx!gLjX1)45ExeVsa}Tef}nGi_h2mRn02 zik}43eOZ-2pN2M|jPj zms=!jZbv$NStTY1j%#^Sr(cQlNhVhv-9FNmho2@))rZ|ra-u`GRy@7?8C)7ihWKETK}9mp zU!$yvKH{bh^8WMNL!lGW=jRVP@Jt`6wGxfX&6FC{K~{!rKYV6fbxu@^)!Au49pfO@ z=re?N*y=Pn{WUDDYiwuL9lT*x8anHh=Bt_!OcOTUf* zs*|%s;|8aXT&&p}nf@o_EaonATa@o*QU1w@tEmA*;HCj$ks+}!+d5iMdo7*oQ-npc< zKwC8QQg zHF(^^eQPH{zoZxEJ9$rcdOKs?ZJ>_Rm1unJN2!q=BzaWZ#is@NT8Fg8+?$HjF#oM) zhd6cFP96@DTwd@#zr6HLLbors%RKW)Y30Goe~fRg{#x^P?t<|Y%LlJf>*VDre;o3| z>rS1>(xLJLI*8%T=a>D5_lc2>mPv2hM|mCgG5Y=9T|Ti~v0~W9CNt(5Re+{8Y{kmE zSM3Yu3 zqr-n_Eyg-kl9ke6+IopK>uO(qy^7uBm-yoK=Oj-5q*=3PEuLO?mWy=WX0TRU#Hn|= z(ah+`r`Jy&Q9kt zx%|>)Z3WbEpDZ!E#kC+~!`{55e|PJOMK2yL579rwxj60}WO$L!-R^9i{kxX`dWVM9 z{jx-3Eoa6Ot=S8grraBA{ykxP+v}jAKpo#lLJ|;qN;JHf_CzbS#i(!?Z`m%a zlP;w$&4hM|mkj6LTI^$jmUUGY98+MIkH|LJQbtE|SsocjCOjXY+#KB&<+$tfWXkKB zN1DD)dOjfI=g)2G;aA>pZftTMnK5pG*x5Dd8Ex#^!CN6)pL!Wx}_!SaQ<5ddFmwdu9z2K`rLC};HLdt&u!R! zIT7#NbM@!mzaSdle;^&~zBa6H{N8td>pLCi+9`PkVyh3OLk@EKXQ(y%W6F*Xs+BKS z+-qFjYcQu!T64+D57i%X9@tcDE#EP=3uCIuGZM*egmq#wCqv3M#+E%`$NPTZNb;^t zeOD@cSW89P*s{u=qra=ElZvHZ~i7+p<0A=N0G!|5n%wtxMT zU(@2{Pu)wg%tBey)T9LM#k3V-G8|!|#7V+ADnA;Te7%rdyX!ZjuQ9!^nA}6?DmG)IC|67)V-iu? 
z;XYvG2X<)oxx0x};-n4|s=2tcBXE_+vd@#|U@xYpiP_F`Sh#OOVi2}Us1q>5PC_Pt ziJB2kh7ox{%*?N*7G1hCiIfBMG)N--5tfOGIl>?Q8oy$cnFoyQ@gU~68c}m!fBW`< zl?;O(&#XWRL0vox{Gy^}%9RhtY>VY)@Yy%^EBZ&FBWvN=E?ACy8IfhvQ-S!G0ZR9hE^bV4eX9(~3 zZ+(HS#JUuDfJrN5VOq{A*dL#cR!t@dSU4Lof?MKm@;UR5h znJz{)&X=j(vSE|nhkAwR9So!PtrzmjEn&`!4n%ruOmOegU>FQ?=6#DI{b9^~1HhXp zg2MahDKdft6+M3ht%mazQu{dnkh|x=xkzr3~sWad0E(o-EnsOMQeTUtY2G_>4;{GRYa} z$Fe|TPuJ-3oKpmq>I%;dL>>q$DrJt6?|@hpQcfHJ5j2X7pvjEYYuw0kgqJ6h)j(LI zOnMWHFxP5dC1?QF)?6jmfw1lf`(0;f4SlvzYuO4i-2E&6-1aMRMVb5XBnenUkDnq7 zfHiC~IYxp2vA+2vDFzsx;!C5z@Gd_x9~d^wC+A5l6gFzJE9o}b91}`r10qs}H=CS> z!q$0YIoSb)ZTHxhWIC|yIMG7p0wN}b3?AWKO$REofhWIq!@=t>LM)HQTwEPb7)Jgl zrN}h(pe&GK7>t@?o7a;>fI9=n6ZC3#mR8dN+m#qfW&w;7W^u6e4>k9@3q#o&+^O;A z^T-xxJwBe~k|Tgf7`2BiL-;Uj#IHs=ES-(K+36!MhRvX*b&68x2 zGr*e8hU!@SpsXhvc(3^C-6G!;w)eKpYsn!p42JUQ2TqeW0FSwF_Vrk_m^!-03dQV? zsUYjvJFp|hvc1JP+;Vm&q0BNV1iJQ~Lul$Dzt=97yM3Nx4*Ik%k*8)j-z17eAWZ{ zYj_s76JBBp_}soIg1M?9O))tS4_)L;))HkXoG&2@$w(+%7)W>|9#~5B#*tTmC|SXa zWgj&`=~5ecO;n+9tWvPZWt9K3WA3Ucz>iDrTEjT< z7T|T8D_R>~_hK}TUyvLAJ6*HPWcaa?jE-Xpz&8mRzQHHE=@^!$jiZi3`4=CmTW-33 zr$K+kaIsdG4Rp-=f}BZNyJ1XCT03^&7np{}AfO_IXGSm`edmM^c?s~Y+-`CM2zL*V zjdc0k?=y~IFmiwHZu-#o>#mSX03YOiC+~pqpJcL_pojE`JxNRe@hBT=vQT(p!nTkB zK&c$~oNXV1g|-rHo9obbxrh7vIO9(b*l*mIIq5$ldAwnvnOx2our21-4Gu<-cxxON947lA>h`~ERkDU{^`XBzb|X;2h8<&%gQoq zf$@FbNrJ}w!_2SLlc;q(MjyDg_9E5BAJf)SUH7SvOg}ng--esBe{2BrZ6WUn`}Zo8 z>STGPWCze1uhr9q+H`ic);S|+G~|L@#h`!K;K?C*)qn3E`z$NV90mHXQCH{^{nf=2 zlPiGuV-l=YsM#B4QYyC~c$nhak-PVe^i(^9O9^}Vr^K8@BmDNAfF4q5t%Pa7yj%NB zb2@SD+kB{AYyWhG`VAc?o|Buv*criA(`oAVGAC$ux?_UbM$*ZsGCUV@3Zc7?`eQ3) zMm;=gkE{o7_I*7%x2n9yhhH3ddtV2Rv)?+XL;k53M)S#0fb6jo5n6sV&3St*YF2K!Z2Ets%c6I7wy z6BpzfhF6&;whgStc4K+n-M+w}p1>|9+n}J)POJ&+234)0FlK$ApnaCSAt#~GH-W4s zo>1s7N8V_O%t6|Gsoc$@C+&vJnlyuZ3Wm7`3YgK9%aVWl1B+1AtjUeM0LCE> z

    fkRr4=CTF-Eqf4U&Im)SL?<9_f7te^(oJPyCbJYF>s0poB7bIg~i4qstT%z-dG zg}l+;aSdu-z93hs6)|i2yMtCQKSb;(X2I{(01m7Dq=cA4!Dce4CL^F=XUaQ-ib!?j zt=;UmUw|1ki?Adf3ZuP=hgK4c#nM5X{78yAbtfU1l3cJdWrx(^u*0>-hh z5RO2>H4@fjDNx*Ee6(h@fy4(Fn4?P zRQjfw<1kcE-E@SUvh*V`Gl{c!o;#$a8}ct*dDKtkY!=LA`&3y*qpws>dU<~76_?m2 zt-|fuI7VRdR3pR25m4r;k=NKm>_50cxB5Ir=SX$i&|vm-BapV8B4Oz4t4166&|Me3 z?IJp{syoJAuU3K-ezj3cmYMD)isajMl=V^TD zfko{&TV=z+r?49`P+|8G&<1)?h_55R&}&gmc+U*wbjFe;|O!mQao@#P?Fk3$j3Wru#qB&4aalb@f06eS^OK_eK z?<2!V85EA(0#m#l3TefxJ=$8;^kxXDq`;f8U>s(YRWowNkrIT>=6JbK^@N^}mdP-9 zq^1jUR#7j$zg};-CGF?<5_}}=+=-N6A_MTG+DR={|RM?y?pKFXdr2$$52Shc`mA9H0LE%g( zvDVs!Ewo;cn`!W;{qpPn`(LNuQ^FR&@lY*LevZMbYC-KdauZ;oX)M_a6$^`b%Q1PQ zTC7(nzbvRlSr6Q;%Eu@-1El`)4PRuB~*}DKVo#-@|&|f!(8{VZdisy`y&>{ZG|9U#fK$Z=^DHn|S$gZ2!da10Qd_*WWUe8N#57yZibT zISU=TS1PlSoCL%phh5|&ARgTz1K2+hW~n~@o`;9js!x)ANBk2^g}sR3-Jct6HfPSc zWBo21c%IMbGn;o%ienS+R@h0i3p)g(_g}IU4&Rn|T+Tcth)U>eaj3KCw%N83O-W^|>k0AeK;g;Y?-`2qINq z9;qh~RjO9S?Iz#~s#f>ktFyj}_GF17Z*s#LyQwD^`>{X9DeAIv%vI#Lw}H3-tZ^Sk zwNp*RRh=ylXbt5C-oz!t99}NIu`SRyV#-6jBnFMW@*=SqDyhEyL}m~H)OqW$m}nq8 z{s+BY)psdwI-%U!by&-~Eq8G=KAB&Wb5UjjUYZ7o#FfFLu#&N z_t%|rI%d;%%S!98L6*Qe9$0FJuug0vuI5jx3)z+`D10^!Ck{aRe1be9DDG>x8{Jo5 zZw~AGxj$`qrvh)(%#E(S7YU9(uBlzWT83f%3M>hpA#9;7UZvHxY6wx=#)4?{1% zs(yK^@=NtWx(45d}r7GBWWwG#_%%5sag&j?%(u^j|rsy}P?vOlp+Yxco0PgU(iiYcBbQ@b^Z#G#l5Ik5HDOtpqmn9G3o?ENdIE z^J#_i0t>ufA5Ldr!B3uja{nn>@zS6}TV9{a-U9(kbQ^ zTtGAL_SFgA_Wgb4%&p*E<L(lK!4qH443d(7`-53w?`4%#F@m3LEtuvOufb-R($Fm)pSa@c!^kxff|8fJ* z*>vLWMK(s;SI5bZ7Qv!uLBD8xMdUn3iR?)^jyJD43SY4YWdM<5 z26#}){05$3*4@1~=XJJR@zOu31$BCVp_GsqUOL}+UvHa=OYu8h+Wvth4qA<`836(C zNa&JQWCjMpd~KsF_G>s$w4V?aI_)~|{m5-V^mQQ_Y`s2|`u!x|*airC`2$`~AV~lR z8jKw={xhd$8hB%-9#7vB7^GG*cC1Vkbm0xxg%iy@b%(5U&vNrWL#j7aoM#BL2c!*5 zBeFW{&QO*xMPBl$?qj8Ua$F65EozH~UVR+dD?_D#F2U3dY$-2+`Q90*|y=m5S^H17&=gWj6D9z%DZZ~B8w z!|V-zh$G3s%mCl~lNsiL_!eqzAWm9OiW*QXMo~&rPf8l} z;Ld1DQBdqb=>v6UO`;S%#s4F1B{}FtDF!_$8c^p7M_KcK>l|g5E#Ve$Ps&+%(oe8_ z?3DMA`v{M0@VLtP=9^-(4lEd!zXYVWaFoEFl;uDf%dLcU4ZM_&&zBS@ zOzIk-KkB}|QZMua`1Z>tl8Z2f_Qm839;xt0E}KKdfEam;9K}N^{%G(faet0ZS+AqO zjmYE}UcrgF8@u?S>WO|IYiz+HfG^K?9Qp)L(fATMH%tcbC6jr<)W|sBI)w>;{@68F z$u%IjSdpz50Q1L{E+qQ_y6UdS>@wf=2#sg?uAj(lt1@oU*sNvo1ues>ec2W1PNMhl zNQG}G#jYpi7rrU*25P&1B=2Wh6B`@#zW}i5t;_7e13}?1NLlfTTqN~WC z7=N7{gesG!gPBJ2JjHVy9wPFmq>;gF^FSy~Ehoy<7)^WJh+ZCl`i(i}(VtLy@9CPn z#5LzXXN^@I;Pm$Kh`UCFk=F)yb;^g&T@gzzSh1ekXH^*{0L09gOg<2ZX8F?L8F*=w zFRl8C(JbHd)-avyKlFhdns}4H^6Hl!KBm3iAzSY>jB5yR%<8jOXCG&`0-Jb)CatNT zll^I9;y-1hTyd2m$Bo5A4jS*FT;#WrWBK& zluDp{1tGJG+{gid-i#%|Ve}qK)G-j}&vBhY_3)e%L?3+_{@i=_G4SI1Bwr^}fG}?^ zd80E#jtaL@;PqFs?FjDUx2n0zbtdkk-M~8E^A!5%d_S2DBp+eq9%^_NsL6I?mYBax z?fQtWBV6nj!`nH>fw%na&wVDp9d3t`45mJUQeql;@3xNQHTTo`ekiBu+-VTL4TLOL zCg*g1)zdN5ZRQ<~5Xwxv);FVUW7ex1c%BU6SJYVIi7MY;g=Askh96MuODBA#cH^+f zBvfyz4!8M(1afjK%tRHR?Y!p{;aixxa*TYD0xx#S0J(~&pp$K9;+j1GFAp^XNBOV~ zDp*L3C%r)SDhpyyA2Mi#8~gJZV1mw*e9YSM*DPVpF(%`$OMFF}4_>#2YzG*kum{~x zen_$<9*^)t?p?=xA3rpnq+%|gzuw{{W`ghwi6`wiQhNu9XXF!EaUk-)Jwgj}NAR;P;45Izpl4D75`EGd)(KqueV!Eau@UG z+=pfdfRG&_{8%+hvK$bxF0kZYLt*C%VoPoSW!H%fIs!gzrf&o9N<*0D!su~l z4+`GgSOU%D0Y}^@OH4BG;{r)J`38k}E7A+od;C3Pa_JV_)8IyK0U^PTY}C1}LWRw3 z;!T=&L%a0!?p2k(y?zw2)=U(z?0w36hY4B!!6Hk{y6_KvfssKqkfgMW&SX-eFYy6F zQacPkZpK4ap_n@2AIh)BTj2bZomn~+3~eMxk=HbD?g06kqJut-FB9sZ$H;Mbau%73 zu>Csb-S|f?gH{5V8aao&LipCt%BhPBh8uY|bDal$`IM=8*wiEqlndj)QrQoh{vV~$ z6e_2MCQzMvRGCcCF*W8+;$wN2Zp7>6=YJgc&!z?Y_b}_UJ_Bod$V)t7b z?(9oK5uWfR5Ciiw6kzhvA3u2q20o03%DD3w=bHSin^aUCdb{|G8CR^4{)4i zo>6sswvt_sublGFP37qjLnssnu<97S^Gmj5k?{a84)JAwtiZya`54agFE(3}g}_;A 
zz&BM+?k7~4*Uwcmw5Efzoiz6i5>o}IM_iipl~~x-9w~MxMvFHEEx(9_@oU_ zjrb3h$OWC=#&p~sH}G<%Y#({KY;N6_Q7S|i#_cAEd-VDgDFgU8<`>>F<3EWY)7YO7 z+w-43AvV-is8kLmj}dNLj~QM5bCyKmAtC?8wo?R+;me_~bUrSlqwu(q*V>&vrlPPg zN_OOvi|FiZfhtud9 zI;x|e$?Fq-m!Kmn38nP!Nl622B#%;t_M~9AlEG=uNj)i-F#?e1bkzT+{Gyvr^{l_b z0X;pppDoQ%r>)L*7s7K&`|%igt43Z)mcQG(zKN-h+eW+pfrT;)`u;KKl+Fq+r#D%V zcW?hqw@Inl--7NkpC5wEbHMS*CW}ml$Nu!5MCq98Q`S%Nyf1+=<5$-v-(t4B-?{)_ zZw1ybn|ukzMql2;O1J=pZ#As3&SB0Rr_1xckJ~Zv_kG)^!=2~;u!9b`LY#6HluLseTFCZF1LYGeClvZ97@27FTM@#8$&(zOu6BVKP zA6#T!^#3WoVoJHzlcEF^Q&URW*8KnK^oI5ty|_|mP@ zQe{ck@NaO-F-rb@WDiZ8{IPH&R#5Rz7?H>G7Jh5wJ3JWRw=Dp>Fdq=@X;(-Az>ayA zc%y*dkx-6@P5e$z)?R1Ve7asADDZaOI5wj*Gq79XkZ}0}jCO#fE05J8VA}-DEno8a zFC!o(L{UdW#N`j=dFJ_Vq>7^14i+{kP59Qd7qGIE_OL%^Vc}gou4w@;h$P`byg*hN zw6`A=~hq%=gsGw6&Pg+aP-OvWjmBXys{x_#p_Q3m4Vg7 z8W1X-WblX^tyKIN1s=Pjbgq1u+Ssc3))oKAz%JI{GIxqSDEM6ptjwQIzU%z`PAj)J z^Cr99^=T|s8)-RBf3^n9q7Jkp^a^F$gjjfX6D_Ggl=2)Enm}#MDZE@FP#@?^4uLH7 zNSK_rQ0Q|R#$*!|G~TjD$tYmaj{HfM0@MNhn*;=%bapMK-vs?NL;JdOX{_c(UdG(c zMf)vf4eFPT{r(wxST91B+)^}pf&t4K(M1ys^nOY9gN#8|tTFing~9XW-r=1Gfyo$O zJkl1J94Gzo?w>$>jTPXbpup@YY=Igmm>ZFw=$H#E`wYWqNnq)oh3AO^%Vtng;D-x_ z4ROOa?F7R@-0(FV!LVyr$#Y;BzJ<)t87j-=?iG0b&MaKpf8d65=21>tK0#MOvkOKv zgIkF*th(Gt6(Fo#S$}jm1-3AU3Edomv2~!{$3bCS z5KP2XU~$dcL-qo~&5_(i_en5bc`peE#P~FJHu{8uiJhdIZNQSpU$SCYpEg_xs-3+R zZpUoe_9y?sn&)X{?{DDi!#vP{uCct4uXb;X=WROb_FOep8CFOnNS%_h7mtPoQ!PT- zP6LoN^%Q%8JOE-qoxgy>jH$2}u-7wTNw6)(z$ASahbB(o83?;huQwgfoz1+RQqS16 z?AAVP-jvT*7%78;G;1GOigAU&t1A>wj0LkphN0;d%+C3RL6pEd4aOIjrjLpb9TT4b zUvdrM!&h_y=H-D6vH-+5L2));3@&vPO!)#l8Y&-bCf&hIB5($`qnJ}ne*99xD?L`km5IFNCo|Xz$S?t9dJ%UvbP7FZ!`=!ou z0hcL>;l*runwZd4FjoE?In*IR->%}Ri(qvQSf&C_&6hRtHima4x%~3#aGRf) zR4B(VvcS65-3^c31Z(&0@%TcQiTq`G-sndslPhCJYL`ChP#p+D_q9_>Sx<@)P)a#U zO;3sg-e?7>>(=bStcD<1&X*`6ToS5tU^eX!zlGP)IQY=Q@Z{sdhm&Tg!)9ZUqtgxF z4;6&CtjCKtf{<&cFhCK62Cz@?(wrcC%qg9MUQ~p(0`JPZ1BcJ&EPU?x=J7XUhJG4F zutDJ}UAG(VSn33G)-N!Yr!H%LWU*%P`uB~a}ncPu2sZr(c^C1zHg=4Ut~%RnG1h_@kUFmx5{`8E-+C{O1^ZrDVhAGG zujGp%tYH6&Q#!WGY4eVayyIaGIR=}Df0{PF$PF`-2EcJ(s2g6S6C4N!U2F}AgAcD# zjhC2^MYU9t1K3oETm{LMpuZqK6{HA1U>;47^4kyNMZsY>LJm=L}Uf@ph>M=KER{ zWmlA`C9^QN-?+1H=UCp#$*K8$zCE9D-&17-YN;VyG9+*mL&gBZiAdI5$I6>Kq;KM#Ycnru zP1zCSa7AtMX6R^NV9mT|iQc&&vui!x7#C!PxnYze$htua@RqgUREQ;}iUg;!SwlRN z5ad1%)md%GMQBaDImXBkoT4iX1iVj=FBWtYF5vr zm*f2&!PzBlLyxiP%EBnAo>RKRO@07}UuU$&z!U`o#LCQ;LK znPFEE#FPb*`{WrEt~ru(BjUK+!U1{SaaEJ@>6ViM%XH;mNJw3;p4&$R)QC4 z`|*N?;HA}dG8WBX7qpkC`2N38MtDDQwAZJ^b%C1Taw-gpAKVN>Yz}q z*n}Rupt|#qPB(XxKv$9XB0%Gbk)(JN|K(*%ytv{It-n$sgE4(5cs1_$23ba72)GOR6l)M02^}-6h3!BoIMRF-zr1#-oN1cK^P4z%|>LYjuN+$W-9RZYV>)h?781R z^MNn#M=v>M5lCyv8LzWP$emop@UEAxcqgatP_$~tmvI;*t^^^!g{(dvdJ0-?e5tS4 znoJbgW-lnUWw73Op+wNh`q35DxoA9P?L2FVZ$=3^YcJt7UcujVc0AoP-F3sr^G-&{ z=>Eb~xRCTAn=u9xvWejELVzcfRauYoB$SQGLhnr|H=Int(?g;BHaCojY4(IxKnt!Sl+NY+3yodr|meX67v-mTr6^w`V!DVLZ$xTsKA;K z_F7}fHvR=nuamN8$xa#Jgw0fpNU07_kZmx0a&<7V2I0!|VA%eO)ka z5K#yXPQY$I1_h%btU6^iakj+EX+o2AFn(%K5Z?l$0lG+NCc|prvAfW`?|M4K=841u zJvpJp7kl>aU?^DX&&P{>Ld)n_^`Gv&nUM_hTSD9Z_H5%XEX+2* z)ho1(2CLi34a?Sz){~7DgdVkF4rdtM#!mAqpGw%3zt1Q?K#rWSX`1 zcz+y^IEed%_BVsk3j^ccoOAoC{kcX z%53EIGrBxv9dG3~#QkZ;Dw`*%$Gm{f1Z?E_Zyax)IbuzSEO%X4IHoojzf~f1@-84x zp?zmQ*i?vvg=1?f@Mf0K<#>hh30wnby;-Ll<7{rYo5^Bjr=c(eb2sw7&J){Ol!a6w z4m7cF+&)7x6k%xrNd$qefn+!Ps|7Ib#K71GdOq@PG^5z6IQn3VXEoM(6$N85{VFfJ zy7%bXq2*`jgd+cBL*ps$fd9QJ@!cFcdZ6u`Nl2>-hsj=&&Wl{}adf~^C2u+*$e(6N z86?LUV!ekaB7#mx*66FO6NV_+BG}% zM}8La)&gzXq+pB#h0|_&ux;Qz2&ZeTV;gXxPTzfm>XjKo{4fv`&Pebx=Cy#7>1E6S zeyVThG=EON{D#Syn$8=3=(cZ=hi98Cm(vx_>|cN}sc`1w0(EHy49b{|ywf9E7h1AU 
zJ&*@ml_ncvcq)`$OQmbfbABk+h-%U|I0juQ;e6JEDwp|F>-z|CUW2akysn&!JSO^quSzh#Uw}A2 zDJzHGuy7%4O!X37U~|06dzGsgn>}xQ0J4F*CR|iykGW9c;yz@wv1AMMY{D***4DNA zO|m{!4iTm@%t+^Jyxv=R@pEd6I}nFYwj{zH-)a>uDe=JUzi_EiF#B5<3sH9J5!fmB zbe_x4yuEAlUJt`cXoQwNCOoRkmsQ$Rov>V($~I(Rp`eP+=8Cv_JZ=^GM+CE-Nbvt- zN6l5hP(Nz;0uC7Bl|^B|We@bDgez4V>0%0G{P2Wc7`VWmdIy1N_INNS46F%8uSvLS zdgI7&RG`l3Jo~z%nLe@x{@AS`?5aaLFVFEr=Bbi6FT{;O+}w0t?xhczYL$bV5cdIO z1VtL6>nIG$_At)F0ll${jMlxJmHZ}qAMz-f!&tdolLcWDiFTD z7MjP(;a?0fxhUMgFW}$}dlCUKg0aWdcM2wE5~! zXP1Q_0r9rqgk(H`1M*X+xD({Hh?l<4=YStk;HXSWfK?fxI zUMwCR3wLDN)REVh$le;ZS{a^BibF@1P%E%qV zy#Hc#uuh2>PgR8nKZ)^>N|-1VQ-zs0N{X+K3lk4;!g%S)zg-O4o%&NEn zF}=;)Gj25eSL(D03Wz;YMlQX!H2N2dsv-G=)J*3A?VqM5(LM6i_+M(aO)ta-JRpr8 zg{J>v*TRe(G9kk>3Z~LZ4>HN0=PkqjC2s|hDQhHn;4Dljio!z`;bFTdyfq;_eA%C_ z!Xu{QAy?yQ?cW;YsQckj|DCtNw8`2B#58}(=zp<3Ay{c^B!-5(KJnj`{4^P4mX$i9*NHFCm zJjLf+{8J7|G(J038Aa(iUi9XOFek-d!|pQeNpU8zFF3sW-=3fr%^fIp9*|9`N-fN@ zad#5_rT#NW39%^&L}2K!_h0PSZ*=GnB#^inv;T|jM$aqv;tHx>^NgbK%bLQx95J=) zc`u}N+n=`Z$1Je$^bvomp7WI@c&S&IKgpkN_WaT)e6?11#!*TNVQ|hQlB0O(LRerY zrgK-YNIb;%1YNh@xYe46M3hCd+INp=4v zzk@ak8M0QxfFmbAApQFNE&K2DL*;Wj#b#nlN~ z&(e?62+zOvr*m99Aqo?u!s3UGW(f_nU-vS}P^s64e``%xMzQ&sBu<$3U#tSuxlrfN z{yYSQk~u5z%^+dP9azg44P1PDT4M>P4|XPy4xd4d|JJ(()Y3$;#-}VQHRb_%HgLeN zf2m4Ht)kyI5nk#mrIUGSjTq07gqPC%@km*CS%sX&4>Ag`Kv2g+2tl10z_Q9%b(gQb z82ymGN=f=jg6NR5^-OR2$qM1sbsp3oxeC!xCCp6 z9Yei<8=t@nxDSP!lfVpY14_lz0zAVN-jYjIe-qi8(Kk_03jHY6O7r?B85^oV8aQA( z0Nz%r#{+2L?e?j#4((D|;=kQ_&7=VAPD705j+ z+WX!hz)w>ch5Gew{8M{ndOT6lGFw~8nWMx zQD7KcEBA^GsmZ$c)`RZd`xdcmC*}(7Ut;Bq?_-aG`YP>D%+S?Zckew)Y?um?+WRYQ zj}99===z@!Y#H_5KR@j8yOF|&Mjm*}Ncb?s(70?Q)HiV>*(Zbb6DkXjwyAY%nsInJIpEMtr6FghY&{LQ$Eu4qqgEvcs1J{1wDB@i%eyUa63N9okRiw^Q04^ zSJZ{iCRb2*vQo2xI$)J=$uaf^N_(+&5`C^0Ph&CZEPOdHn5u@CFZ^f}ROR47RYBEG z4{E`xYM$dEg0Q;Ko>(GmNF{p!*2LTQc@zv!Jaf6?tmKqYY5dwaeB$nD&|J+sGM&1) zudQyAdw_VufIpE4h_|NTPs9V}Z3c-muH6H}5I2YH*ze}1-t77u2VbQhVf2*-D6Ks| z+s)o_)O-|*HQ#%C;9Et)_c=$6FW4(HWBbly?pkNLEh)HF%&0Jo=5!uRe6HfGS-Eb* zd!^N(EL?^k*1_t-ajOkWW!o+Q=A#bj(?`_~nx3>=@!;l=Dek`sS#eJQ%dF#5NaDN0Xf%(`PaWgG)5w*|-2bdZv!;N+}^&GOCI zXSAH1$Aj5s^k)C|gJ^dP7WU645g`6=E3AjUfaumMz~fqB_Y1nbM2wm}`GIhXy8{}MC7==abpB99nn!bcst4K+4}dofBHZqI00~; zgOsj5OY-VvRx}9v?Fsy>f+5>=77Lbjv^CzqR1AzoyfQ=My_2Alwt-Ucy*plc^3&8? z)flE}58&bkDE+Lv>otF~{XJYHu#`kH{0fX%MKUjf$r^yNlc`@Vk`3|W{$Jy#u7az< zVvy3QynA!r_|Mpky+Gqhs70Ja{s2AYCMH?z@OiJ<@_5HR#^1vDw$*(Y0KX?Ub zEBY(V2+Q(0Jbl(Qq^*Xhm(OOGv$HosNlAl*;nj3euY^?f_7q?rBCix2nzZc4!mGL+ zzz+Ldr0n8@K8Hy8#49}L7xmr(k?{fSo7z^HJ>9`UB?_K%a6!GV0rjJur z1_YZpq#AQm(U%fwbQxm&A<{HBiU~fE=B-o;wU$712m-W<;rTX&LaSp6Ns`aG;K?;I ze%Pw^QrJXs`bvkhF9uh4A9;b}83Pq`1`>^t>(Of5G*HUc4hd2F;gEsEJ$nIn{G{x( z9U&oM*R<*oH>I~ctSl|1;G-cSFK&#yinxQ&dfzpEcp4FsUyZ=*?bKk4Gcxny1*3l^traU_>Z(&jpBsP1(Qr>Wav~Bvt)T0W{yYUEzFk zcG!rW&v)X4o&Y9;*F>zoZw@LXL#2#^CF@_u{w=|Ne1(ce{XNhUii`q+(e{gsKAl4? 
zE;3F@#Vac!69+O)o$Up)f6G`Y$#8$!^voNWf+u(tR-=H zW-PLhVRZ;H4aGNbQ1r0qSh@*^tHt7(mS{LEhqVAlI9907ae)@{auwT>5AGOnczOjs z;3%kU)tCFGfXG_UkhlZFdIBqtF}lchx(D7z7ug;sJoR1)@N|>%6q7FKCZsIBI|HlZ zVzDa?Mt@smKim(4W08GgFy8GDIq*}BeQ^D^-er?c2JclGDSPni0N5_JVD~=_P@0h) zws%_B6T83gpl6UDwrv8ykzggo07rpktwtE`foUAksC(6919WC|Cb^&)6eB8YOaS4W=vBInbVty>m`1p2CR6+7#f6{i3ly$a>Zql&fgm z(kkO+=^$>?VTIeXQVu%QX6XEd(Dc(cB}RXlfl_ip%E3D&vu?J7IRp1wAbeU@#ng$ssS#B=LlK zGjZ*F(uMV3nyYxIYF5&svv$omsNmU&CVooA%R-__+X~Q+6M5MAu?;0yNUbuK84H4H zFUxHgCL}F-=$M9;jPqa*+|Hp)xeSo0Y^b!qDJ5yqwa6|;$S`k^ zKRcD~{%P(W=tGI7B^w$i;=@@jR}9;HGD)j++7hh42jX=7n0`tc2TvxoYw4{1tVeGO zi>7@uG`@Qh+H065d$2Ss$=K(m9*(%hHaXbG1C;h9WGC%4+cOsX2a%g-I(U$=$AQ0D zUrFqfmz3c5=^R?PfAD!4`YM?L?v3LJeH>7nndxT@@ve|)dX)#BYl&u<1Y=55G$XwN zzsV<>xeN^QLMTYH!3*&Kif5i59w>`u30Wug0SeG>}BA` zr31G&xag=Jz1!_E?I)UJWv{M>yW^9b(uUyRL&?IN5iKHmA4W81n>{r`bFSGNTLq!v zN+B`-8gyB45#51AB~yM04kRupQXfxdnu!l6;CP zFl{877vX{7zi8eAL;PBnXx?voeAQCqJ2Y0kKIdvW7r+038o-_nPS#o{nYJbM>^AsCRIg#b>uu%oeNJC zkhOqVFdpN$3-=86Z7JA+jR=c(u^qS_ zmw`XG3*qBk=mv_G&x^$XL$v%yEM5i>tq5U17;o4DPce6^OlH!KlTE!l=i*Z=ZUuvw z&`(J-A?BpQ$yYyc&cW3at=vLgE78ibRJ@QZ3Y_P`cA)_bJQJsGjT^?sSn1%5k0*!m zN=wo8um{?z>4q5LiB^TP=IR&L08{@w#o)uUvYKY(F2>=2s86)I%$_Vp*y)FNG(>CW z6=38hTC3(qySz4}0Np*&I@1c$h_Kn-_>v>YdU%03H0yTO>;cZx8CXJb$G}HR2Oll; zc9!(QnEfap^22WB)x!I_5W6pDf$R^ummMIkxH)OLnc5?S=pMWHnXbne39W7YFe z|Lv4lYCD*ey=Y~_m*228uLJXX3w8jlyF%Nb|s4muN$SJszBiB24Yks})5o zNu_#nBbdZ}P;sLtnbt>m7P{>^NU3>NQh{IbR0SOH7Km6xo374Rcb^W#>3ND5E?hjf z%(Q<8J`=d|qRlqo9GC)AWTCP8S6ove{ge{+CtX-CsV>27Is|D=!R(e`C`2uvB;ExM zmOs(gg)MfdL(Hh+-C9*Z zA^~oTOI5##?Tyk^NGs!|@m(tPqllvIV@~6drf55OF#Z6eBe+T0G1QMuLao2! z;5y8IiDFdh)!BS(SwYF?-o4wq4y~!gmTvyShO?UPkr8c zn1HHfiWvu!?=6i^e2qVI2eFlCmv*fBba{}cvRv^(_se_HLuVYvr^B(|RT9j;$HwD^ z&1e4}#lk@YJlGJ$T`rhXiM296v9k^zdA4fd@e*7g3S+=lh7VBknvwA=?~;cUHDdoo z&~kCE;_dynpAEj2xE=ZKY=OFI%M_DRZ$HZ~2>FA!SdhE>S}@)p6~)`xs~Z=gPEb&K zvo_`ViSbFd@hNft#>X^L_hwIcFkZJ4?P>PIiyfi_SP__S679_~#1y}1@6%xXVyI}} z1mc8oq-cM`G8BaTkAwUjqZ&Ffi(Ay>cxKXbUfM{@% z5h%N)k8>A}#rJ>kGb5rzy!hDk`(Ql?(L_l`ewg?YCGE7wNK2G-3-mee{^Wc1bgv##phxJUL(aj*wrHUeMv=te z#HP@u4L{N1+?ECHJ3LP@aeG#i`pRB(&le+qts(WRQj{U6fhbXQSdZ+%Jb~y)Q7Rsz zic)8$s+&+PyIe8zWOCcf%)a_q8#P0kQZP|M80n`j@&ca6xT?F`zD11i`d1UDwL`3B z0!--!_H67rEcorDo_l(Ohwz?G9Kre;GpikWA7~P zg8rjlbPRU#dw|Cwjyj90>$s(z`tcGFFkz6%tfk%EA-Pr-sEAdNwrMFf8ppqhXW5_T zzQ}IFuAcUexoa~_D7k!d?M-v;*ADW?`e7Z+l$QKsCi^}QT4PFi8?4z8#ct%!jzcO} z?0u3N`%vB-TQ+7;4q=uLOUT(}I|IXzi~yaA{<-wHWjy_!cu^i{ za-AUO^yBerlS%m+cd<^bgf7LC1`>_>JKWcyt=YrhAIbNvh|zR$vf6qXsS#qTK~H9j z%_4@=-c29PTsvE<(DCzJyYm?3_q&5>3*T(mwcDB%wqCnCSfS?FX^b5*Sux(l5oHb$ zubjfweQn1Wei)?WIpW21?nWs2REL}I*8oh)k^K2&#$&Tm^F4=JjGU7vv0dXmHNyY_;~dEzL#{d)m#4T zuk}#KvNoaXFl)X9Pm4ua$Hn-;DpA%$spfmGsj6I(x&BM&G#9XepY4pT?3$=wxeHH6`f8H zQxkXkvIH-GiSikGdLhamBf-0uqWmplJdzjX=lfIqnf_wTKa0+|OElihqmLbL!(R0s zI56lzWU0gN*v)8r&ZJAX4x3Ng?thvDs6`I?u3b6l?4V13ivzh$a+)~tTL&$v+<2jX zb@1Pm0E%_bBP}cX=#EH8zO6og!{bgXimk~d-4{pd?hfx;u;uf}Nn-A|aN4cd8hv-& z$DX@q4sUIEV9#xhA9+N%aJ+8xKSOUc_mNJS27NgjLBH-QI$I_Ve0-O31=z6RbDrwP z$oBFO-&;Jv&V@dQvOVL_Sfx95QpTkqM={zRv*HIIC;uF*ZyIe37>#8>jZ{@jnU z*szmsH0Ul4x9orJg7L3y-c-UsYgX^b*um$YS08Aw%JHqkWJ$qnlOe;t(GT;_wP8Ed zX6l)5AFMJkf1AKuLMu(}Aj!w2darhk_wpDm8r&X@l?ql+&#Z8uM8p3ht=#uCG1SsD zRQ0{Tt9DXmH`>6$5V2Fd9`~D4)@<7RV58FgPFL@ZzLIYLj$$20G4-v#jFKIX7DcYg zuqhuz7xm*@a&Uo%@uG1PiatyUz2T0MK1paSbaU!zlKqHJkdn*knzO_QszN?yZ`Kl5)hF;n|eh$Cgzb zI#IlP^_I2#8h7izzDTZjq|tkkh1hIkJMC=W1g1u&)bxx;+oq&!HLsfmw6hPaSo1B9 zOwDUcL(+#1G5dyfq9tZieQBL=TQ=uvmf3?HNtI)CijG?z=k{ScDV95Bmi8_Efx!ED 
zBRYvizoXbz?AQ7AGpYM@{;|}|$czdJvSw?qvzC*>6}DT??(wmsWCFtZ_br`}HFmwy{jsW?FPkm22wza#Tv7oom+Yk|po$etNF_kZ-YKB9-4%t|7xc$$~tR_}kLqi@}h!`y)!kIL@UH zq)t0I?Q?H0)8BgT$miDLeI=g`RQyvyC-%2B>({w)WcXp`NRpx7nh*^tDczRc(Jyu6 zU2XpO?pLne&p^_pB@#2&ERO0#icXMG@~PjRJR0Az`tDRZLTPzqW5?A|i(fvdEZlcV zITw78OO>P_9+!zOH%n&ia5{3r*LS%Mx~ppxZhf`Vv8~+J8xt?+g?QE#bXxgN}L$Kfg!q5 z;*VEiMP)`}jjh>~TRV?@*cLt3aLMsN<0H}6(d{l<7G*YVE~ge<$bH=mmxY>tmK;?0 z(opu9K1x|0iLEYlnOQq&Ls*Ng8|J;PYK!rvn&_&F#OZSft^0Nqb8UK#>$Zt+n0xge zrG4D!fL7VCuU_f6%?XZwt-!xFbt$)D`{$AD+Cgq%GC$MR7EKbP8+`SSnC-%N`syDO zjTl$TR%gYwb``t7c_LkSdkrtm45-&^%Bcr`txKvgi(3T)Pm^*})}uq_box)5*7A=) zy}m+hcF&bFVOH#qL%x$uhSch`HeHF_ew$(+Td`N=)=gGBVA$6y^6(7wuCL#S!rQo_ z8?N*?SahREV&=n{!37;8tfpq_*Gt#0xJ?ez?cU8fJT~l=*#XmDt9OD^@}ht>C9Dt!dho2Fu#VYum8&bD+yp<;le zW^YdIds(xtH|~210_6FVPxO-QKrg!@*u?DH4bFWS#eDy`&`a;GGcQT;(Tl`Eq=Q`A7&m*+F}ErwUvGzgiQLa6*|G~1QfIq8 z6?s;jpLEBWTY%cOtawi0?A7+cpVw@ETPo#tY@bm~%Jb6Mc{wd(rVLqSSgB20oo>UX z+1#8RVm_e8X#U*U7+c?3BBt5ETl>ULW9D){j%3B|P0pATRsU?v-@WPwpWNb}V>C1K zA2Qc1|8QM;>8O&rBr0)@E$fw~K9|vD{-_VQ;kp4N-DaguLhiQ}y}`=ae4G1h;pphv z7oBri&S0(E&rJCC+h@e1zm>X2uC)K0LAOkVEnD5Zd)|46q_=hRHdUb~cRNL*q1H(y z&9`P14tM#wm#&KHSoA&VG*X+T8XGw^{yUFoCVBgPR7=a154=7Na}#&wOU!<7&l8wO zWZKgf6hEEd_)qU3#TPTUHDSe8rJP$Z_rUwn{ZHKV!W{jbhY}jC-4#mk+Og>FbTM9T z65ZV-amwa?aBLmuJmHc>Q;etYtX=x%NcJ(>!U|h9xjJ%@n)m!gcj830yV5x`(m`fD zD_pc?VtMV-$0nsH@tzF*-4xM17l~OFw~;GziHkI2vB9CIH=F;yJ$zi5VsEvR!z0a? z96NOSn76Oocnt3E6-J$T=u0J+S+g6Z{g+0W{M^$w4amU%7*n8*OX6D-bfpEoXdixrmsv`@7EW@u5-Kihc$bLm%j9} zarBy^AGv$Yxge%4mpEKZU+VN`!iLo!y1o0Ua}KX9>mAX#^!BR!!}THSi|Tw_q&jwrsn3a2%Bhm@1EX5BT9`Fw?}{E&iJhu?QJHT?Gq$Q{0y=ZIj=DA{W|8_Ky7FrB$Rg$9fXSDpJkKA!@$&a& z+%J%VE_X9t`7$NWuH(*f2edQ)xQfj@I8UPg2==VZ$d%*1XpGYT*0!`Tj|-*p$b@W< zl~AUrV;(-rz;s#IeX zr}{%+C%P|Pzbn{n$LB`|IXIS2%ET)td(r-dS+mBI2ZU#I$A5O}?saK1=if~q#iZth zhRZtk1{~uaB_3iss>|m%rIjE{JQjVjWngh=ED&$?_Z8s#Q22+rXCfd}*;! zOKZl|BUL`{(fmDamY8{Q`hHypvGYpX>tzx*N4dnY;N+&sH-0%!YnSJt z&-rY=RO2!?>sxHu?M`u#Wi`y(x%F2nF?;vyp}&SA=iA1CVXNF7wZf(D;_YXVGkOEH za=O%MH}_mIHf)W@ku8Vb<-YbE>d^O)8@J}|mw0cZtGxHAT5eO6sb8bPl%Iu!;(oW1e2jw0SaGB>|u4rHNF- z*qZjaIG2PiJr(O!pjx{0jDr_ODK8VHc!O8;vO%n&!2Ql{OdgSbx8Aj>Hm~|~_S%`4 zKR}$Y=ZAy{I=-gR@~aqZz#` zXX7rsf8mq#o^PYSj7tUNk?6SlaTheN1+-r=J8!Up@(NEAL7vxcg#^of{;!n2X0N2! znKta9ejj%?qzxFS?L&%ibyl@P1bCWTov?eBB**{pSHHeuiDWbnsMV9D8Y8$FY8Xq( z4w@vm4IZWYb%>=OuK#L?Ht{5^2BJ+&+SH7Z;zco0O|pcZXVlb`9~zZOWj9%~I)OhD zmwg%gO<%j$@cx{im~G2mC&o$LCwmt!@(5(oIsfp6HM_SZBx(FQ|CPO89?2ZWxl8f3 zY~QDQlbCJg19isrmV7%wc^7q%hp#UssTuH7H)QA)nsNyW2YaS*$crSy@Zak`T;@;0R@vt@Xq_}U?gMrg0gr1+MS=#8d7-rEtq*(Svo=0tB^({I^} z-j;|p=5zC@XUoO~GDk(XYP)}B*6`74zwH#$T*o^j3EO~4?RU#(YP{#NOM0itbDImt z9!Kis?Assfgv0jkh1BUKXEJMT*mFY$WYpTd2>p@f6Ij3npo!2#k$c9y^6;T{pW}8M z;arTTR;=>ph>UGlH{ZLrd`UbWp1lu};GJaA`$8h`6vgR+w|T_;!-g#7fG^S?YRX5) za*3+ZfojFR3n|kHXrkn8c`!^fx=sS)eg_q#-#c$F@s4rY_m*-lAcgSn zYw5FIW|(sOWEZ6|liPJatk}uE%g*k# z^x`&{J+tcXk>Y)k8T$1f0y4gTrb|KPG}*c^wK$acXPin%SU34U+ryf*(@43nO+)g; z{&3mpqpvu#V9gG;CMBaUNK_0*x}OZ-T%jSS$&CSQN%ch!_ffq^fBA)R_g8@<-k%bE zohrevkchtSlaLyOl@ffVMf6QmY_@71w?7k@<~=V_d|7gw(Q1-S(jH`|6bj+{Na#JZnr3$CIxZjm-~B&rmJok zu^R)F?*k=xA5iptzCqWopa1W|o($8t7Sm?^>m)$N3zAHte63 zIXAr*sh(eO@k0uE1pvS zw2^O}% z--dlSX5sAvuRc6kYq4ozCgueyqr#w5L6+oP{@0ognocCz5!U5iOS zDj&>hj`M~(^=cBQH{6aX$t439^Y8n}{%Cww9DHt57nSs&odk{1x&J4kyO+-A4bBs} zl;z`gqV5%Pf7gVYF<3En`j0RJc=&qLIvG zEyuUzD`M;7NW`UM-Bb?^bRlQY9e&$!>c(P)jsNu0!#XJvGs7A#6wf6yJ}-SIPE0Sp z)2$N}%qiS~d8Do}+EmO$|5lP{v~t<&=CNem@9poWc^`kdIml%@y3xPq z`_qKiZwRc00{kPC;+w&uKg%Q)KfcfjUj&XpW9%pUW&aG>pnj)?9mD-Xt}S~^vG!B? 
zlx0^v3M%@PZ~+yYN6zhQ`ZRf`^tzl!xA_GwJ;a99PY^LMpZp}UzJLk)A-lWfGzCI5~gQsZ)TZorMS_{Sa4L4Ei zTYslWZWHylW<$cGz6Wes6Zge$f8Bz`-t=j^g}e_{zGIzqW1^%zl=`a%*pm zHTx-^SAX#62J2PtPN(4>XkQ^&xsi(zif!5Kdr9@vHncBFGpydPoKNfI!>}40sh?=J zGJ5*Nkyg%}ortz!quSrr4;;Sc?C>3-LjpLzc#RD!Q{1=Vx4XLRgn(t=+BjVsWXrle zacOwJ)NJ>&#p9~c5$WhF)kx+%r4n1Vt>bLt`32+8g}<0kk4HrvF=DgpYTOX*V+4_s zrY7E{FGcxv{S+_J!MJ73n)LN;I_s_OHnMZ~7`zbC@m{Jik2@2Hux2~shWsjDscO1k z=v{~-)H#`6&=hrsOX=OH&ZiQMi`*`a=pcqiZGJm!+#BAaBbSMnEdH8G%yPKs$`fQ2 zHT{W8KiB)mO(&xVoaqg>W?MCkTcw^(NB+6Nb#QY~Z{G zp)Gs0S7_%}rO{gc`@jFdJ00B-5{=QE;ay zd2jWnt8(X`v3vYK);p!p1=7cw{p>oid%ybZdBW1;*5HV$G0x`rd2Kaj4SNv3#;e9t zCXFN-3Nl^9m+XRq+&nUlXh14 z1s4DS3;-DbWo=<>cVTj6E@?D2-2wmr0d3?E05#ejiY&w1^$@JbGJx{9oI!6RQ7WjmlPsV35%tOB_$Gaw}q(Iy4DiA(Jq#( zbzh&b>$W7xN|*Vb_nq2}-`DTgIsfxq{?Gq?-pe^tVkxmmUiBFrG?D`X8GozU|GtMS zngISO*DKloH4b3X3p;qb4&aRxpy-(zUfBRdW6XayD4}yGz(EN>rH36v6a!2b0Nh&G z!A%0-h%uAg?cgL2&>C^xYg_pC6M*~Z+t)$`N73$uc1xqb$S>`|G!pH|d8ASeLFh*a zEh_9Gt^^$E2LG@S_T-5^HLAyh(5`J+8a6-x>CH4@}93UQXr4o~`cmf>9 zhbl6`R83Mr`B~2zP=>}EWEP7Q@(1EW-&_~Gn(qBn3(9`C+P0H;DQ<Et8hfCmb*D&QY@(_#CT=LlZ^3 zt~X$?+TXjCGa(_s!)OVm)VIPbN88HFHdA`Tuh{x2P>j+1?MJPAb*?l*C(sC|C3rnS z$U02l#&iv2x?DddDf~eRa7na+a>(E~q0i!>`~%4~{eVf%SCMfLm|kAJGr zA$-#f!4f8(qD(?o0iT5(87Rn0avx!UaM$0^hRE2p|Oo7P8S&;!G?w+qpvjK@SX4 z!P>(5ZUCtPdGocdQ>=wqNj2-=M+2ViDWPqnZi@a-4^jwO)=*qVH!E*=f$=}VI>HxT zuW!1f`5ZNPoF&`?70Snx63`cGiEl?v@bjC=|oesf-6I$|fGNE>=U zIHv{2ou6D7y#hIo4ROWIfXle;YiHVX$OF<1`4+^u;x#HjuNxLf$P6IOX=YV=dI<;~ zzkB0kDNegIZJ%C|b_;|<0f7~~|73df{8P}cQNe@3qcH0C4!*gVb}mqV$k52S`Dyz1 zAdHwjb%d1yEMG0NQqYr$?@wn`@WXViNd|2AV#R7t9@ya-S(4@p= zg77|fdm;@B_DTaq(_?i~d`PU;Cxf8-)Mo&FnE&^75R{HN4*1~Ww6qBbx1PF&*ejP0$6(=4^vB)49jhdXasFE{bu z0n=8N0vQNQh^7k&u{EFW2PTc@1`R5&!C*c(h%dbCz+dHdic~lbq*P75T419c=?kU20{1#(VYg9CbMw zl0OzR2%7NKAyjOAcX~ze7;V+c6KFH=%p(lO({Z+&iXB5+mN7^`t~c(X4^)zlBEPGQI@1 z1n{+vs7siNALWDF6fV|Qj74f^gJlb8=T?7q9R4%pl67SVR(iaq6)N2!QqBe*D9#_% zq2FBC79-^uu&&oJ$07zd{R3AM+$4--{?@h>|G9-UsonzrH%3_%Qgwk_O4OY!y1ya5 zBDOt7{@9)RWczo?K1Of#o-UM$K^0!lo*%m|b-DkbtIb9(Rzt@6k6kGb6nR`>wJ?Gc zTu6P6$Loz-ZX0OCKu0P7l>{XYa}_(zXvdQv;6scn3ly#u_TvEKeR}V}Vf6Dt^MGk7 zuPkb2myzdWASxf~2?Ci@ZwRnxaEy3?3mN1%8#_``(aCHQ3com48v1u+%HLb;*q^st zi|WJ^rOQ?#ggAyLe#J$h`zgzyNxY<(S@t$Td?C(~eZXQy%31TWp;Pg;OMsj8tDU3$ zPe09*ce#a*^V=V|F9EaVLDqVfois*AdvyGI*MK_J{GF*HB4`!4EZsZi7&Q2wAZ;n- z_PZdrto4wo-=>gm%{R+po^Z`t6pmKX^;s|<|JLEOUBieTNzt5fC<906BrU^F* zzLx+STN6d^D%2gZV*O2YarWBdJP^r#zxnr}i0&K2 za{UnMJy-uD<@yYkeof`6!)0bBv09#LB{iST1fbYPG@Sp6S)-~Y9>2OJujj>(N9*!% zDb`0#WCKJS#qAetok^xdCS$4R8?R_!Q3{>ild{obw`-6`EQnwa*T$~+7Zt>gp|qnC zkQ>U5ddmSKsk?Z0=9HroZ=@6g-Cx665tCm;QQnG zTr^W@{&5J1b*R8y4cE2g9f*|pUKb$qUZvihkW>d4;t?ZQm@d>sU;Yz7^%FOxndaTd z@_Lg>84aw<$)SuPOm*4*TIm7AZ`G^8Fhh`ThON7jph9d*m+)Vy5Pobe&wksl7!r(s zp%p;3fAE_J+@q(6+ui2Az0``IhzgulYzLPChg+x=mhQ?MyYzb1j^-8p-yG^Z59}(o z@(40wBIXe=yi%z#&yOm~_ayt2`mp+KFORE^Mw$2RF%`ipe=M$aRV#ZUk7tOZsaNgz zTaCSEzq_+D!IwXA53-8oQtSYbTgHeZ1A?81tD_E0qT4~-% zh&%%40WPQyCe)TrN{R1qiGK_}^aB}-Chx5pqWvH$81P*jQ-;o<5G>Hz^Lba*jd&2^ z8kuNq8Kip^r0g29rQaG$;B_v>KjQD8?S>|nnJ4)mxu*X5_fe8Zx3O9xm9Z2Y31Yz* z)(Uj(*TKSqIt}U!^WWVULISjYMl@9wt2ev9lJ~Qz;1MJcF(wEBAxii0DhE;@0jrA| z8^=ZMN>IF=_nz?0Uh5_#HI^4Io_Fk<4}DdN%p z`_8gIQ)MBAXyR{+$CWUN@GWOF0GT?ltg78?>fVQlXykWBkc^PnuW}obSG3K!)|SQg zWjXJeR=N%^9^23RrJ16g5xLSnGHWu9OO~hT6k<@6%Opv zb9q05{4Qbj*lZytZSh9eu9?;;$sDwjeV*%VXGWh2qHBF-81-&llY$p!qw*hJR}FZY z+lU`>;F9VPeNbySr}39kCmRg&lVugWa>Y9iRU2-lCVxM^@F@2Z88nP_7u*=D@HMM0 z-(<38aq4U0^+`EUI5FB_7srcK2}AazU;4B*L865@VO;Ppm-^Oo8;ctt=qRH5IWtah z<4y~n`Rk3a5LXKh5(sKV`HyZA0oY& zsSlioGQo#2GOzEF=*Qu?+A?T)7GxJ5X+q1W>5=q_%k)K|TWYO`*(H^_Swg4r{-&0- 
zln7}A!pgrH*+kWe~nx6+Ida8;SFKeC$3mHc!H2ED!ahM&Y!0gAedukgPH$Gc)s%)vp7{AM+5; z&KR>be^x3_r#_$LhBptQ=lf;&E?V?L_bh(5C3^9M8-ovwM6a~!C=RSdLB>pS$%-KS zRos z(Yp#`?&V*oyjQx17hWdqbc^aP_a87d#|!5OuI%BKqO0$Tn+?elNC1;5i1 zRj&^cmwP}br%ILYU%pb3yxUs(PeblfR=3Ky@3m0*Dl-z(Z!m|?rOGRh?<{>ed(8$^ z0B9M$o(aP1Nl}e9bb{*ZTYs7MpCv$isZ=?lh`fAHoEHJz8I&pS z+!gt5`9v-h=QI;I)b4;e;bzv|GG?AxUG-VENA+oExiZYSf1eyjoq_UuIZfbRp%nj| zZ^bpCK>W-;LmvRDVJcW#oY=1#H`7W^g*b8RtVoCdbkvoJm|Lgc5sF%~akkk0ok*Fy zqU#hoBIl_xsA5N`B}6Rx-2V&3p*P+TpAkYo=P7?9*8v;Stc1{s9oy7GuN9Q9(h=$p zz2;HAZAa*5HD%IF`8_*AO~uNj56AD+5$XfNHJRh{J3@;MR7fAfS68t4Uud%k2L3x% z%X>0(>qz+@Iu3lEkU>9ng#MC4WxNj11~mp9({U9k@^28^m&>s(9gsv*Q-ypb{C;IL zEG3NGJ+QdTp59sG9?f8XEnuVu8JIPO_Ab3R*k@tk7`#|$aus{;Gh|q!GTKvZkjlfm z+o;jNyxEok4BEAfF0Py;cy%wTVdC-wCp_hUVdQWb{cRs0JRnUl_Q~5y7Veh+HG(oa z@@J;#=!wabqD!9Cp)cBWNo?;e|63*6W%L*+>Ac#+HFe#UVMhyqY#$>(i}m{W;Bo)ID}+QvS>}8Et%(jVZ5;?&uTS zqw~j^9_8e+fua0QV~j5#bHoFCu05c6_tnma&aY21+6mwu{Z#2SVE3ur3wl1fQ;X{U zPl=Xy^gQ`r3n`=59?ddtQu+Ms3+KB$c`0KxRD(no%<5aw+4i;XfT{Iszp}r6p|hIa z|5V%0=Vz2tKVRpD0Aoh0r9lR{=MKsV*Ij73z0Ytwkj)Nad$IgaNsKO|^9AZworGTL zqZ78OPF}>?>?j~Mqc&K5-GBdO+c57GzWnb2#gtKV#XBPh1ea(O=ldI>`_Y^xKD}Aq z&w?`gaH+s%$dOHBVia%9JFt#HHCNMbi{fpDwViOkTQE|)#7MrV1w`kbwoAjfhAYRy zLeD+B#`yQwAfrN`xy0oBD4cR@%at#m*{n{K(SD_#V|B~on|t)?x2TW&?{dT!5TB2w zQNyX4oogWk`>9QfZE zV((A#aStt{jsw?C3|_p*+A^!t+|R-C4K5&dMcE#^ZfTugW4qAwjlX;a3P^t5We-2E zmTxyp-`v^tkU>2(h-wqha|dZ$){}313qAV3MPhrsaK@=+6 z{pA76;>D}1r`NkMsE-Eu7_=zpUF+O&#%izDnsj5F))$ZvW3L$_mOIg0)z6Vl@{#r{ zAV=eaXS^Bu`Iwrk?nF$RwWNscU6L6m#R9U=L}k|IX(m!`+gQUW`J7c()7I-Y!3!tq z&gZ&k^_nE#!GmRV{N@95!fkAZ~PI$ps{S!s&%+qK`Rec$=Rce#)S;%ji~5gHUs!tAp`^ zJ^mO+|ML@%o+|&#S0l^lg%rWkf0I)LqZUki<33N`YXNz`-hbJL&I6WJJ`UD?D?dT~ z3&{7Qr+ zKl>o#G`@^}>d`Cevd*7*J}@ z%d6UU)JQRqX*poZ)Ks^|QMC&+kN+QOn1k8|E$ZKK59>LJ>ob)@rf6vs}D}m(hTH zaH&u8-+idPIK^1LsBvZV$}pGBTT+{!m`^zyUN6^*ie)s9Ufnuwd&XVA2{GL}zn69}l(F^hTecl!DKH zGwp+8{~eQympN!fEv;Sm{zf>7qQA$Wm)LenY~LiGT|faz*ZR9hOL3ko`pWu|me<+r zj=)IO_oOazYAE#C5T*7v24{#Ppk0{8_(4f8^FLseqVt z|CpQMnfb3l>s^*3@(&l=Pg~Eh&8z4Yi)oh~$F1pCSh#(Rqx|rD6cDB3r>->juN|<- zp~&3(tXvfrkmM0#ZYYIR?3_5;&Orq^JrR2=%GK0_0&ol5?ydV9_+?IBdY|F)k)A3b z4F!Yqw0dM$A1u7`r&EdikH~3|D3{}Tm$OtCZ+Y3<-P)W%g3KqH71Ae+xCoqb4lK-ZW-NVPD?9$xVhe+WTgC0evpN$>A%#Y zl}(;CFZ$M6&s&Y0lEvQIi3}TBMr-dHeAPSg`ADGl;+^sGm6)nQ9y^D9y^&;OU*h@H zAWg2ORLkgNhtW0PySGV|H)J_=kxNRX2H6+U_uIKJX_4E-q%bq!&v9b=3tiF|w zi+{dx&B^$g_;c+5`AEYx$dUMs&8>r1O54j;s;sk+j~7>cS1b)C7trdXM&gTKV8CS1jh zz!+Ig?MejxYNDF@V1M%e0Z>Z=1QY-O00;m8000000002lTmS%Ng8%>u02u&fUvqGI zcP?o$I4J`F009)p2>|+z-px;<&8^`xXA9KgRWD6E3tUXk_j~Wm)<%y_sZF8iNfbf| zMF=5;B7_h^-XVk}gb<3b2qA=!_o7&YMF=6jA%wiUEBVhk_vZKieLmgJ%$zxM-sjBp z{ki-1?q6!e7+_bX}mMpVvM!pbzyO z(3h4^4c5!MaxKW$c?ex2v*J%U2__qM zlD&aD1=Ci;NtXvd1k<$(h~uEqf>~|9dfzVXG|p9s2H?X~d|TrnZ^_(^q@$2z6X$5s#%FLY=KGNb8d! 
zLY5Zx?=e!SJ1mrR9X^94 z_A_z6Ki$-Q;aV&ADa)#Ow!Ma!awnw<@q-mmFXgB8safz9!Z{L)2G8~cT z(tP&M=0b;l`BWdT@DO|~J&E~~27*sl2eRRITft{c8fkbtPw@GDon*oKauT!btkC($QNytc=a%LZTm;{d*NOSb3xc1E17GWx;MaWwxlVfs ze!B;ekacRk-!HaDHF~zt<@Ypx*E^x>#J!}G;SjyQ88?A&yH^U`=H!#OIdY+U2M4oh zZ<@Eg9pKTn%P!Bh*(<#C-N$g|#K@qHzQ+uH0%;IBMCkdnMeRQiquLwDr?hW5I9ljs z)|2Ruofdix8N~lMCG^F!0D8a-J_2f(MRj_(|^V zH>iaDeCy!%^I|u@g0VgP9FO((o9G+pH>hbJA^4gDS?@7c2!1=+`rzUmzv=T%`VI0u zO=nXoFEN+N`CJ# z9kji}M{0Y&-gl$-m+yCbx0!jbcV_cKVbsNqWUgCR{pf+*U9x291|gzxU9u{Fhd!b$ zSDznMtRFjopTe*HD~!J|g{-)HNgvsi+esFmb=6O}DPKpv9Pt#U4jVznx@JUbqmwbPqY)N;hEcwrY%{i2`epLl3&5T!pd)_Nv**q!m5My`Q=abtFCg*Ny_J+LejVMM4{XvtUmIdw+^FBt!Z zhQfwl?MTuU8zIGG7P&ZOtdO$1KbiexuaNR>u4%o@1{44HBW#@5h|Dr-q2IWQ+i&6c z`S_$fRqmt&XU(Lijjm35)TTh#+<7dS_QG1fc{g{9#0=Rgq)k0R8V_qKY-zla+zVeM zY&m5^nr+`NY`N2nd``Ts-}0M#Mt-dNCZs>=M$VPF2-|G?^UGcd+a_KmM~a&W+dce9 z`R2jG_NjHrwuw`O?Mwd{Ua@a6ZEb5WVaKBSwfh_&F|DFFeA>w5`NEDJ9m(*FlfsUt zHe`m&E&Yyf@)qQqww17_aeWforIWCyb3Rct`7Z1wC&=FGOZ0nBarKGIo@v6q`ya`% z4qb%Ii>=AdJEMh6R)DLT2>aV~s=fE_lezn*J)e6o`h~E6&H}SPDem(ov~MQ$5N2S-XWJel=@e1|7OjCJeh5qCiZUxCrcrBdzu-dRr=ip_r@*zU* zoKQ06zceBD&TG=}`6S`=@lewDM6z(Ec7GCIKVLXA%$&T>d@7uow1TMLb`#Egt52rR z9W0zRdq&>8?w~*Wf&0N%&JfN&uSbs9+6(7@nUgc2orDX9`MlF9;lhnxB&&m!aM5BT z=|5|O{^C9s8g79az2-CD%+~H~*y?_=;j5LKM&r+=t`V!(jfhx1@N<-K#lwm0XuDE> zWgs_~Z@ELryP+p*8d?hZR~#(TDgxGeaDCU()&19=x;0?!`8T0L{^!-?#nYR@)%hpL z#1G^2*F3lfv{@`!FI=ZO>zMi4^=D(RuRn0+=6d1z?ezz5+*$8=@}Y43zXxQvp_6dq zRDII7PiNuA{dTo2T8`ULzG>WssMZsOn`W%V8Sz?wb05oMf%ad7f)|7MD|+Ep%W>qD z&nn^8Wttv*Tl{7v2e%Mf&9t&D%|OiPevwA74H0X zBBjcug069Y^5^I^K{q3f-&88xt<``;&~d_D7bmlZH`7v68th4}ym2s9J@T+{cVKIB zt8=k%cX>}<@kY3NyOG(T%k9(PXSlv=+7NXQ{oQZeAaXIhRJh-xHTinVRJcFCzS+aT zo?Eus_u4|_y|=tc2^8+{SWMbqxhfPkd_X)qY|$5f;|CGz`v-(aJz5aMTaWdR=5VV? zx4aI*V{(aPKFkvyUs*=99Xbk6dNd=JA>)K6GrsW2xx&-Y+sWyL>B7^$&&cQK{z5V9 zc??wRglD!k#Q$ZM{#h%o1>f$4@O*T4vMD}ac)ri3_Wr{!cBQ>}yUXFtd*L~2GdfNj ztAEj%)tJop`jVl%DGA6e*S{Li+AgCft@ZzX;QI2KDE;fUTwUH^h47}AGdcG@NO&_f zjVvDZLU?Pq+G6&-@B=2ogae5V6Awg>m~_A{Z@Tc7l?}d!PI&vP8~@~yzN|gVvk4oc z_3y^>Zv2IQ!iP0i$)5Rgp?sh@*;isJl#f|VJ`7nRl>aw@DDFKLKK5=_`_G_vhb~-s zcj(8G_xg|XS#I-w+C}(0(p>l)=|w)ZUm$$mdPq_A{_o*B9`uM%5fE)yE2GDe>>oXZ zic@1PKF!EIa(hnh5$D6XM;Z(`ccj!JU;o90uV1f0@t~u=&J z^QRMj6%{9zh5Z$TgU!eR+ENf6H6sq|hwAl&p63sy>#HyD?Q7>f*>gH(>AusZ5B68d zIL>wje8cSEo1r4p)mH#ITfcTn{le3e7e73GX~kohdAgVcxA$TYKWsJW9=r{px9`Y+ z=!py>ciIu^l=Vl(QA5s&cug6}Cgn~0kX!o!Q9h>`Sy*|4Ez5Va_GV1Gr)<)&ZAW6H z_zTdCv1ECza0V$zfA}ih8LA?fk08&BHnNFYr=0lVGXZ!q*zoG-*B9N-m9tT8R&^W< zjRBf!Q@hvp2AX+eTs1jMn#0VhLF7aA7=T_}BEEAf8DzZU5Xs-Lm(e!q&6|*8)xK=f zY{*#h$a^zDXSWd-Z6gLLhd5bGNo$?A;#ljv5 zcs@h4_*<7RJ;wmcI&H|}33`B7biRvi1l26A?lh~xH-ySm8)R0Z8zOl>F{e;nMvfh|cX7_d-yxH{j z(VKHFow<2!$$6N)IobNd7^8yI24)3g1C<5WdRi59J84~zRcXssZCOk|?%5rn8O!)l z6BtF?pIj~e&0scF->MFL6)O0;MjA#m7TMP@2zK0DcR&a;JOnbX*dJh+~jOXn=hd8w*O5g$1WEz$h+TU z@*t!EKxbQ!?Hh+MMDGt=3ljg#hE2A&?MU1kfgRfq`9sFdGGkDOfg4G~9pFqI-kKib zXa8VOuk~DeGPNA!>wVLlL<|`Q(9@1Yk+*?Cf!2e_>x%0DP3op-zwArd@a;kw40Dx+ zDwXWryNNub5_paI;l93*2>P}i%-aFEzN_*{$yKm%zkcRq?^Yv#Mt&jTLm@JP`n4nT zK7cO-EpNduTnWI*_GC%3K>&SzMmCjHGpIk~sc&F?fS!&eOW(vYXh7$UWV98K8*qOe zdHpJZK?BEJB2Jb+87sk!IR`Q$A5=7`Yd$e)3i)fWVK?&N&ndP%xHr3+j(Y*IF{Ixt za&sX#`H(Av43!q0zLt0H!bTyie7p1o+lREhPEK5J3(((=X7?M8|E8EX_1n`Cv%Vd@ zFrUqac3noCt?vMIZ4fc+>&&2`wslFtla2s+`0;17?98E4_~&G^qYs-K-g^{jRx1&p zgEnNv(meoOnascI!SF^j=i8Hux~pt1eEeAJ0WF<{ADf&7SvyZ*%UEw=jR%EKm4{)aM~m7S(+h>cvz`hI=RSv-`5Vcp zJ(C$U+RP2UUeShOSr7F2uZm!_`w)3zI-RjGrXTM>#!YI-z_C+V($QH#h8VZciHvzP z0-&2GNVOUg^Y}&e3GD>5#&7*Whb*59%ta>KQRfSK$a;}kakTN$Tc!-0;BH5|B@fI3 
zFu;zknz-O9qdy^nlhN_MCp)p}i4moA)Ax`_Ha#hT^QF%p=RtHvCHl~ZG9!qMsJvq1 z`vq%Ol#dJw8u)pPo?%YT;5_Nm|6b-ZcuFBRlg^1sug6fM1EOe;OT&TP=o~f8?d-6N zfzyKh>B@y|;s8udr|rwlpkPh+3Zmx2Uw!~EKAURmX0Bo246g_pRWQw$5uU+P8SS}0 zXA7I2neI=2_ByzoP0uXhEU8O${pBxuON9oShOz6PxV(0$@cf^*MV9Vt!7P zFFpLU6I81?S(dcQ^HMQ@niy(3c%+trG2T(Mt7~_E05f9f`H5u^iF56IRl4>c`pu4f zWjAM!*n)dPXdEvYXx7(}ig<~C~+528qHGznmmpGG_Jnjm? zKd;J@p1x!VlFWBMN2l)Ha~i<1RQl=0)~O7PjdP^Oi!lIVGt{*Hz-dhwxWGS#u3W8~ z0pPh1Iw9WLP9h;7;;v(p`l<{*Jyd<1c(zf6I+c1=+ z(e||6*HhtadTBBjP1}EYS;^pKGCqqo+E$=q@bWCqiw5s0wqWRqC0cr7&o6`5sp z-?zcf7`QT7Ne##MYr~LNsySEMwQ$2%HoZ#wodz8L4IZ3yu86Ak_P)u$)hTgw-`Hbd zmNk}+v~OqIw*Y!;Xy*RMT>-3AQ}33iE;4X!Y98I*>ngMg$zddeZdyL=8-r71Ts&<$ zct51{jqy<=r_=zT&vmI!u^L3#M7a=pX3bvkq|IR^^wc{m$Ze@UzVy3l&U*l3v#C$e zU;xwH6X}z|`|kso6GjbM-D}NAY*BIwdT+zhpKN-os*v{GeD@%L;7LKA#y&tb@F zC8wp~Ym^W_JBxB?|MeGv#;zpJQE}yMr`TT#L&!0^v-9ZP9h&J3%$Vs(`!26q0APka zH7AZo0nAIKAGQ9u4BQi@rH|i6|7PG`$5?t~!d&qBz3M2MVkw6fVxKXKvi4n8LUGPi zW-F%sR_?i4SSuce8ZB)(AqYB={fYMUckc!}0L%=d)jvB$G4KElrsFOr9Asn;gfKE4 zEvulvIT#jAJ1(m~0KkF_+C8-4I);%I%|+67i)Vzf=|e?%wD$ZjP(}}@+bctVdw*D& zKk8$|44=_MqZ|tv_TfC{loQ*u4P)?;LOzm`C*{E6F&|Ik_QN%=b{>tL`DFGRQ0uYi z02;J731a+MZZKV4UUZ3p$Bn(|%p<2ED;@W@qg$SrasY-WQF+?qUX1GTa&nILIiqjM zrcY(+==9$=Ay#u^LTItsB`2zhz{tA>DJ9b$ga^BiiCG8P_H0w%zU7m4~mV&h}Nf^#vBsyV}#R zWlE^L=UKLV=DY@?`+SZ$X}Ag6r}KG9bn2XCtr>VBD3H#se}5c+={eLvegJCO#h@r^ z-l@+G0MB{S6^i$eDK3`#(n|XT=n6FMTrgD|mV&omic6utR2QKQxolZL@9sSdS@?2E zAT_tC3!T{&cW;_|GIu1~cO`;Lp*NomhQQ9N)EYN>b*R8^y+u!2ZyV(K{5;-=Uby{n z1B11!G}E7V_rs;+dI%%tYt;w9j7YlqQwzw~ z@I7~VGM!L9IEA4+Iu}mszI z+M@Im#KjX$hT?!jhvo`5S4TE{8sSL49UBQIeVUj|4}UPRVqmc zub<1nXR;9bVd03i0EX!3gcI)|{GYq#QtLm{t=KuwOSn`;L7hd5!%n(D2T&5DQmE%9 z88$9`I*cusBrC${*gmR33@)qSl2yj*b5?7rpPxCFuYEt)>a3+}$4%2GI6VqrTi++= zP?IguAn6BxdpbYQ0t&!~2o?2vvjTeA53w40F{7Rb1Iv8^=%&$i;KEiOUqS~)8LQnO@oeQ8RMt|MOroYZCp$B&7LbCc6oJ(JA{{^|{yRjV&D%&!aA%Az} z-08Hl`d)1MNANkS-S!i%7(bO|wC$l3$R$53b7}f(Gw4))Wvl3wF&YSgUs`|q;c9Fr zMxxS~kEZSJT!HMbkBy=yR?Pr6K*+y?`>MW-`QO8bGdHp6Dqj_)u6Lk$s0#6>ZJ$Lz z^IMfXlRRtwi9vrze>%nZ2mvrSMp>5nr}m{rH%CqwUAft`b_k^TKLz$wx4AKx?oTFWImA4Wdg= zcvJ#dkVjW$KUvK{gJfT7xYOYmfI3SW-fhi5086VBXG15yc^I(@hK40}#)%d}=P%zD zy*sjHbyGH_konNB_g^Fc=%b>?ZnV;{qZPq&Px|c{4;Uu#c64^OLo|TuJlZm09?KQH zX+#dS3;flMA)BdKK+bHp?HHR@dX>{zJ$%_cpEtMnqES<3`~{HpbYGmW7Xp~-W8BJj zLjM6#gDUxTr94a(#VWb_&z8HKv5xhemFGRMVj(N#e4UV)bkCmXT99B;3}AsbH9qOV49Hti z6|H~lf*0f5%8t{}JF`3{uxaaJA8JtAgoO#OQU%bxfg67V7#^jhhe9(=PR$6Z6>zKY zg->7$m@X_+8q_+ZIT0cAh}(H&>$1ZJtS{qj^1|u9lxr+u@%0?T=(WOO^#F{Ev3#H@ z{k8em?xJme^0k3`d>Yj0UXPVI-j;I4RM{k=8AG#EM!M$jTjV}46z@9wn^%sS zdcW#dZtGUYA4VqiFM8UyR{suB55H`DrDR*}QdqOJb96jw;rROJ!sz-IgTaUmR9Q6q z`>5p%qd^{*NUcuDS?TBP1CtcD2j8D=ax4urV_y_PyE%4aO(E~#SxU+HB5;pJmLar% z(~Shcs36txde6$nT3^Wti&bnqUoTIs;tuFEd5BnXx*}q38n|%kJN9HWhtdM_;g5}6O zx}ejr^$c`z51{6B_-g=Xrcl+`Al8uat_qe$)^Q2980;3ryHi!i^dN@PG}e-8ZXE~& zFxio=d%ooj!)Thr#n5u=&llKqGxti`b4?uxyylMfmZK+Kd==7l4R7+yH#aUlq1&N@ z-j1*oltllko=*g@IEN-1c4Eya?~%+E()Dh+0-J6Tp;P&I#J)*3EnIr5>d^8*KNpOB z_|RuXh@uF*ucdz>ZD{%NHUnF!?C3&eJSe7>m$z~EfTf$-AM5ZdPSMpHrlM16k&%L3 z;CL?uD=!BA=Ka}pYu{j6xA4{=08=99{44E;0$9mzBfi7_V_+L^UpjGdqZ9yR>}j;s zT3-NDOX-(W(X7hwZDZ}}-mR@p1DKviO$QxeT{iC>?no^+xmy4jA4Jc0n*IgA!c-bz zzLE75e7h(++W4u;6u?AB<ij6n{b%U@nT)YVbD#xvH;rJ&y7_* zzP+ct^5BKp&0hQ7$oa9qTZaoA!&)&_S6yAyT_I##6U9dmx>5$#kc`J|2dk?NV4l${LmchB=D&O3pL9)>o? zV|p*BtIugw^5W9;9x%mv;~doj1GcA!n&rMnGdo{qaKJgh}B*+4DoPb71X0$^6 zJu~o7%C#&u3{^!~uH05%89(qy$6MTej1jOLYFNvbzopvKrGGOed!H=c>&bV`XD&ZaN}~iw)H4A zfRzFCYg;wDjqxK`b^r7G-~#|dGHI{pd#|zsN6Of3K0Ch7f78FR7K9&d>`v#lc(DOMZ+CiF zH+%#;U~~jMN3Wh9GLXUJ?Rjsij~z9Mp-c$LqnishL0z0^XJ`DVmz!eWjH@;u-w9x! 
zi9T_3@^`=10G>;tCJ)ZA8wEcppp0H}@?ke)KFZUZUZ~Qt&X13Z5e*mHUe1WPWxH=Rxpeyl=b7MKdqU^rl+R*(9HuZpj=KZjG#$_JF!a_AMMST(3r!! zA<@oIInu3_qajdd_yjA~qz;-8x$kTU43m9b8}2r0sI)t=`|(CE^ARK3k4-%j^6R+m z{=mLLu4~5B?$Ywfz_93kou>B$^zvZ($<=|iR{Ts~4Yj+icV^%$Sr*OKyJWCaXJxUV zsH$D3j7`tUji8xT3BY-bT{^AQ3_l29uA^o3sfq9N7G#<%`!H<8#a=0VY|M6uqL@+= ztMGIiI5W1dF>C7h*l?Edd#;AlVrP2Mti^|+(THWK=A(+g4q$N>{WG!ULk2ELjG|+% zZv$sp@IBpll8v3!j$1F@?ynmG?y)eXl%D-rEN6!-ieZ^J&Z+rjHXWCpK}R;9zn@Jn z4(6iinIuD~5KHpHm38+k|CW!s;83ea<8>p~c3#Y;m%2yMuYtoM7cWg!()_Y#pz@_G z5q+^(D+91Fo}QTL)Pj+V58)Ja>$slmrob-?PgXo^=y2@i&Oe}*WfgI>ZIl`|Bt(1C zy|1G_unh@nE}LHKdz>})d}2_hrGwj{#(R1Wb!+;ub!TI<_ahn|cb?CdR>-PoRl+?u zctw@ganOm)j~DI#e(=VxTE0_Wwl&?Ty}ovOn&R(Xn>`C^ZHvmwTH*qzD~qEnLxR2x zE^pm2t@Wlnzth+1?W*%R3c_MlP!JteGOYuE>T|}eYx(SHaH6EovyH8W*zi>g*^Q_cRXlveV9!-c_`D$B=ghvf`pI zH3To+73)JS_Z_tdFt>^-291XfWjD)t72%P68FEH|X%>BN{%RhBkA3I7RVU7^O8et` zP1(*hLGfvE{MO*Nb3XD0)6Lno93LNAa`($009n5~cT5v7Lr#G|t*TrKeZ&c+iVmFC z>NKNu!j<{7{7ikQ;U`mrY3Pu~&`6&w&r@ym`!e?AX)-FWci6BYExXUMX~18r>--;d z-={Pc^g*BX;E1_Q6XDYgW97%Z7 z@Ucsn28OI-O7_o#j!~1UrH?9B-eusWa35veS_O^1tu}FLpWZI)^yz_62`**C)0G*2 z;L32>HQ4SAhxD*7OJCNzIn9iwP(zqHQ``D!vN64z6DL&L7sM$lKSE1{BI zcURK>xBCLq*L_@RSgjM_oYy0~ErsjN?G{!RyIAmDJL%E}Hq5-_LbK&MUSv&MA2|p24f~ zV8`+@rVi5hu9AI~9|KV$7Ov#+gx&00?id(Hgs~Ndk z&XOuIy89oB!2*_S9M1Yq5>OGrzCA-2CTWz$#31VyH)h`xG7}C|(qlWY(6)XAvSsJY zlt@xm`~N$B?_}oC5zLf z+c9mwImEN>oXpk2sZ-A}4U_0wEPaw0@P{E1NxeDd&cyX-QyhUsfBIRYx>$ISsxsi} z71)Zrd1m{M=dT)9cHD=M7_gY9*FR?NEoLH9?m-N&^kWRtVYQkzkp-bFv~~A>Xv*{& z*CA<>!PNWbg07w#F*hPn*>K!YT{Dmu9iiJ7=&38@9BWNT8)McIk&qPN!_C+d`8G%M zENx^BqnEw^X?2Z`#XA`{O^I@Y#4`$*kCNDpt$=tiz&&qG;_caX>awH@GTa|rohnAN zYD*pJyVM9r8}PzBhLUa7V35Y+g9L~Pg%>s~`Y<7Dw z;27+8)Fzjar#pg#IfJhwOcCt zww(p*PaX_IqjY57=dW!=F_G@kz2Imk{9MqO!-11+BSnAAv1)>@ZI13M5@>_Y91GDDgWQa8W*qWH}~ z8NB|X2iS43x99{MxIw+EjnOXb1S+o!u1-v%*w^rSAo?eWK}zD}GM1h|4R2pF^N_8P zsi^*4`AT#m7vvZ^=uc@ul%euqx4ICu2Qq+><$D6GU!5(8}KbS5cj7?}1)MijTW z5L5`u{iI}>3SnE!B5tVBx1V;QDDLQfD&C$+M&{pZ{!dF8nqYvvz_+%}@<;PEHL&+* zX%$2TMS&Va2pA37Y|PCGzv6XCnOZj20L61iJ8D% zJS^%)9h3|*q}yT(ak4=*kVIssl* z_ZCzDE3c32tHSdd--Re`T&V;%COhg+`WIk!M&N~&8Q_tXJ?+sK`!SoSGI$xdh#EqA zL{_*IaWNRwpwsR@AnyjRgZLca0MRohE$)wus+#dkwX^{RTNDm*2Ln>?GYkLmy?nJN zN&+Ej@NQsDh?31|8-T~KvFuwh+!elV9-^}x-3!j!-fdV_O~Q=bD3&SBD{%Pkd!VoQ zI0|OmY#^=94FE4nC~b{cleb_RG6IMFd(>mRrpN zvhFG|!-oxSj;abr$t*?Z+`4201b?Ae3fiGRKtOb}v_Zv0gW5V`Ia_Uq;ShkXC8i&+ z&}PRp_O*8#gB0qbUsP`i9hz$tpBH57BHd&@RhL&)JRW@u$G zpHFzrnrbo*vtvk#Bl_+8L$LCmxlX_K||S zoN*P@)G+4O#8QsxZXjeIe@F&Lg8HLF1t76pw%luiLGr&qu{%))UhxKFY@6u-Ws4eEdyf7TJY*uW84K5@HRRoY8 zKRYe)h8nXxC;Z=8N7Du}7DK!_5Np8sG3XA1(}2BQx%O;nsEmW=ZT|28{IX)nKUZ1) z3-&Q-aNd})ZBbKVt0R`)yU`HPQ_ne|2RMvqj@o0jhJ;^&k~R;jx^Tab;4$WlWhM}u)6ufR=m2@RwDWUN{-*B5|&6pqM$mzhn zDu_`<(xer#)mw~9|EtgwhmQs8PV$GmH1`Xrc;Vetwl``r=ZPGEyCsx5 zf=iWw^0F&T(0>9j*gJ6lIuGiS91c|^PlcuxtE|{Jy&vF=DJ2=Xne_+EA%}RFwhdTP zID!vV>;z5kTy6W$CoV+<@G@|S()OmJ)AP>$IPCoI@*`UGEBHRG$ALs&9l?|42F!)F z+Z^on&L2v{`)oh-W?QlCLKyN*Kr^vZgHCa^5oEU+k6r(Lqi?~9pD(qlDTMspA7)~P zt8E1vW1H<`--*W1+Qx1K+YM>$ih3T2f#imy9MJ-^KG~vUf!jl8fPvZ)4RG3yX=ULF zZY>*p3IIENF2j7PnG6x(2MyFdqqbNN$-I`J&)8r{7K`^o@?AB-71@Vx)<^OB&=lQp z|6F_gfDC4XK3P@u@-Rm*DE-9^3sdUGcAF?NAEt0$8IV z1~R%d|3V1BKBD>hF8>SYLRVBP$lv`(S2-`Xo5Lm zk$rnQV`O53TnO%b#O;4|cU%O_cz=P?@2)bzxc08>V6e4Vw*Y&C7v0UG_MU zec!=<8%tIfX{&1b+31jI>>Gt0Sj&d;DOIY+{kZ0>n8+$G*-d-Dcx z#MFlnAnO)GzFW7Lt6+l-lMC24l;B7@iy1?BMA3h-dJ>7 z7L2G>$}e>Or|NAPV5eX==$;;IgEaPN<0(;l0FSNNx;4<5q=edX6n_JGmeJ9%M`EmciyE{U%9?lnb21n};Rvq?)e%rp zc3y%WZkHQyG2lGFuayyJingHbt9~Gz<7Z2aNZL9uw!<~fkf?f|< zCZh8>;7w`ELR*ZqNjw}(Wu~C(57Gk^h*>xYM1F7y6tW!NS(wPJQ8|!|vfGs*z8wbO 
z9Apj@f<=M#ER+6SDvoX2z+^q{&Mjkuj22?v>ih>d#2bAJyFfIX9ai6Vn6rUheA^*~ z*%4Utp#(jT0~c~LuFr|r!%pZAO6(VBJQnY)P%l^!`F}y8#Ll}bJMgrUg90E{V)5zd zWzr=S37X|@UqIhsuf^ubPVDa^2eib~qHihDHN7w~nVji`oVX$?Xe%TIG!anYhRL+N zO%IeKm=mv&kjC6qkp3?OaPQdQ{B=<7=Sx1QZWl}$AT+eB;sZug^fV7tyB!J&DP^e6 zQ%slR`-&ZR;ynxWr^Zz^Njc+t0CMI3Z>G&mNcXkeGmt}2f!NciJjJ&)U?MLXovO+X zud?ipf_+mxQ@DcXE7*OVTz3=id#>9tD=mP(SeMOYfkZ~FvqKAjqmFwFf>~jyB3K6Q z*@-aJU6?-G#SObxh@%B5LN#IY3u&Xs+uQP zcF||w{-+_L2Ydl;LJOeSux=808d|S6nxqxFLTh;dE6mtJ--DT(V_W_6sL@zIw)F(* zQbOBe?)1Vl!7XTZ44P2&uQj&X20nSu7+RLQb;YdI3~h05Xk$G4qrM&nq1Z#Nh2+h0 zRv6j?@Jg0J0%_78@@jfFD4?19Q-PC>_!b7w0<4cT{{v}+H)Mm-3Mg4$WlB)6r(F#z z{Ksanp-G7YlItKoRM|I^oz z?eNH*=_iIii1}R(o{Z1-w4tI8UdX$OZHp6J254)on<79wLwr5~IzLUe!fPO0;F*7b z4Sv~e1F^fsGw~|halth=Q_^O!JBAY+oKUK&s<}!0^6rT)*#p_DUsY4CC;2;KNl1cL zfPePof49jYd1BFQ#&y4fV z=}v6hoF0Q_J(I^%7g-0Jf84a(5wx zowr1>KtBemXiVoV06y`Eq3Z465vW#qe|Q-*xBj1%Bxg&!2XQSvAq{qPKo~?*ejJEO zS%BHCF+0wvf^(=b9J!kT`g{KR-=Ei5*9%OadTmklzrP?AO>cuQ0^++d_1w~=rtj>^ zg~QX%6BEc+?cLz?Re4UR-el}@g7EbgeC(-_J!X%#sF}y?+jzASd>J}+{^PD8$e{;@ zmS^iR7$^{0?kX`)yAC$PFlZ%KQ1PB{p@k9^@}UY!kljCvMlu}C5UE=knh zR|glN5-^weSFdb)*m=~^Qo4?Tu7968i@5P~Ny9Nw1J!7Dhc#cfd$AYyb9ghOmF zBqiM!NYCHQ&1$-AmKE(V*8f#v>Spw-3Ldnh@b9d!D*n-HNNey)@lkU9vbR-`lI<`- zM}4tIENmqB=i}P^i;drd%rMyCRie2PBs^ON6ek*s)h8OioiXeJ@C*6Pwc8Nn4`7=# zFt`46Aj)sgiD|DPhP&%7(39H4`&QGKULHi{#EE^Cy+NfZ3hdD)?S?Xzi2yN z<%WUK6rW^1IJH85^GE|DIsAdV(kKlwb|i%^Pj{#*`g48U10E*!eQk`PRcIFqit?Rh zE&hp;$NPZz$4MRjMX8;)r^3KYX85CvOasG6Z$mar``$&y*lM?f;|$nv!*}@WVf*dv z0rmNJ5w(4hH>}0}=)y2NnDRV~+;G&kZ{Ulxkii%*d^gAr}Jj`{0(YGJK zYOhZ#0dtPTLhl$3dA*Ku1RmZ6gK4<0PhSp?*!L5b`X0Uk18+RN-)!DQKI4sz>?m95 zAD0Le$uE)O00r*~SP;=5SG`RwZ5@dDg!X>bFOw#HwnXSnmcK9VWcM5*PoT{-?)^~Ak zwDfz|?9Qm*KaV@%?+!J%0q6$@q@#w#^p@}XDatlX!_AkUmd+XOtpaD65&w+0kHQqs(#cOt{Yi_)*5~L zE6m5BCdXVj27BV!o=XfZB3hmZi~DhL!p~l?I7vESqA!1w{6){1Y``}Hyh})Ns&hAT z6^%c29RCWo+>op|JwP4;c*(drXFESxP9$pc6kIZ(ew4=ZQ#l==UcsXmj6?dvao>Uj zegL38k*F>z;y%dVNQ1>1E*nsnAnMX791p0MB)5r=MkRc~#rTul5*X#7N)p;|T>$lp zL}+;ohq52U)x>JUUNG8(qvYOTiSL}RsxGUbf$~XIdqU$ zIJ)G_--A&;j;{RVR>Ej3ikiO=6KQR9m;4KLfTXdc$$TAe4~s4Ez-ylyk}*e<38Ur6*c)`rrz7MT zDzEW?l5rg7kSNJ^FS9Wkc1pun(JB}|k`(+ZQ3=EFnqexIfMt@vuZl2v7fN37UpBfc zFA4Uajze0VCDNs3slY}cD)#k4ZX%3Uqq5$-c>%HkR3Agc@OlmTl)z zvDrxGLwBwTj0|zDdtF?u9r+( z*2P4Y&BpyDGknugOzl+Cd}RQCoXC-`{wJOpEve{tvJQsZB;KpS@st~Q`{Hu1Gypr^_45qksE~*44+3Ji`;m7 zZA{gkh-!V|GN8_tj!?w~!SJrMQ>Ei@=YM!eJyS7){zC?A2lJ@0Y*7dD`dSWi%9@{I zooysqOWsf!0BABX#mF0maOegPr6cTP#MIXSv?X_#jq&shd27@i^I$o$-RLKWO^XK} z?zoA=!xu`2J1dAM9G)d5v^t`Gx4 zI*#Ke!Z}W2dm`_oLk|y?$ahw@A`ehEojY;ZjTnl)CHK`}ux~^veto&iFscS`mUlHp zv6`ZcT`%B32MJg1KLy*njuQ25Ay_C+AdT(@93~;8OCA*NjZKQAnV?i28-T7zX22(I9*i>41i|CDdw_dU z!X50*MZ-8#;&(`C7z&>hNB|O0QCW)j_!*N(H`|U`qNiPb?+tnZ zN@fTQ4@>e+m62Q+TT0l|bFt(OK`~}@LYc;)_Gdg7tI~X2nl)~je1t@DRvBgjYebuE z%IQtn=&vMeK58e%pgpvy)1=&gU}9Zde1f!Mx}#aEMa&B=Vek@(#eAxTMV~{3E$xdr zrVlPHO%T~Rjt;-fRLs!Jag-osqwa{Ac$mWk)Ke01Wnv^Ow#BnoenKjGJbTsbd5~96 zNO|?BfdS^#zLK1)Dl6@Q&UaC=9niuvv@dRkh{j3ulPb|I&g0>$pNl^006Aa2cCgs7 zy_K||oGUin&809}SH{(rTkNWbscL-%rYaMO>;~^l%(uwthUZwL$DyyJ^hGaSDFs4G z99c?^BHHF^tj`Wo@=J}u#^{1XF-=hf@&rrbMyd_4AisNKjPg8@94f7X(}6dKN_aXm z3;gZ7*#Px085OzpcE0?wL^wSeZ(Dnihi%=zVoNQhqV3*TXQxZdZV%$Ip@>3B)Z-q= zw@HXQy*yg7!jo?@}Ucj;E=_VD2GZo>_3i4 z?sB*SFS!*+^N2Hted1bJGm_vrmDtnGK>s;m#o^U?xFqOFHJWgW zbml2fG1%jzac&5YnSKRsKJ$hSh0!8B=3*zT=PA-L7bDTVR{y}8hx}qTkGI)R$l&Ge z99r$IBuid0R^Iw3ciwHxP4^|`=9O`_^7@hoUzKqfUCWVyYvTy^KWlJw)0Eo<8*fTP z3$jsA1uhoc7H=f0kk{L@vF#ioNqsvz8hqiUM#q==>ciwU4TFYmN&A(h1!efpe zCY^Cln+d}hX?Q;x^JoalQP>8HWTA9OVJ^Olw3LPqyiw#Xc*cX-+zrg4lJXw;V&%Ok 
zso+r@htClcrDGl|@ZQ)A)7E3LGVrL_$DcSf_HrpCpW1sXm_&7?n131)0HKy47^otaxu12<^9@5R@%Ij6b3ES)vsg)Y@s_+^lKy1?hf2UQwYY<=i3~n zb`^5+#XyWIB_8(m0@^cMlI5E(=GJeL+~1=4aI7qtm44jDuu7E<{27AD#$J+pTrct2 z>nY;?T%(7LZ%~L|31V%zj^-3DP>h9N(jk9}hyqr}0J8?N=RCICC8C%HBu0FtdL_D; zfn1FtXeKSo%Q$RVZo)wZhE`lX7@3024HV7s9JR==fg%8(3HsCwE3s-Xz*CJD@5+7@%9jg)-1|wYpszu_1~!{sL05cSY8dWG%BdHF zp*j~#Yhc?6#V-*(%)mB;Vs71siw)P*gR(tXGTtU56*_!^gxoM&%#jl%!uCb@9`Icn zI_NN?7E0KS!Z~aRmr2Wx-k4DTAghjIrmiQtrh#K6hnjsa?R5^|(fU`wHw;|+icR=q zNzZONq;vpzZZZN(%yx)egQm{dpq!Po<}M2XdBP=mBBblxe>KbD*c10hI_@XY)(<2N zH&YtnYpxnOXp=zkUScls+rCn?{8mXepEz%PB9Kb#ed(YtqB9%#XvE7D zf+TFGA`Ts6lf;&9CMI6FwAoL^qjMjX&gvST3NP7NYzU)Yp=bk%mVc0VzZFoX9<#A) zNt1T=sJJO#AjubyE7vGIIL#4Rb9pvf>l=fI?3GaaYBIrB<-dS$wpSiRbbU5$a%jfCOWp{54dLm!snWEiStlmWgT7@;SQ zYL0okK$37&D!!4hZdOjj{kU65jImMi9X z4Q`s1K(O21Eg{F0aRbR3H1oVRCKmf7?Y`(K7T0x{J?C}j^oovLki9I~ESrsEeYs?- z8jovkP+}mHA@%{}e!)Yu$t{#}LG=@`v5}l%M6)Xbr`SITcnLiaU!7viSU|84m-9@xOHs;eFdYTD16E| zEOo|GC~TDBd%#F(=f)Tg4RaZHZhFXLPkja%N=qR4>eEW{QvOk?luEpkc!6Goq+xLNXZv=XU>qDN|I!#_(X+~Oy-y%lW}akSIuK{uvWs& z)M3(3kyzNDiEqSXP_hFH(Tj2hgcP>GNCB4hOs?THnN{!n7< zNerg=RT9Z3WgOnROeM~W)#x2B@c7~jm_|MzXU`HuRcyo2^TQlIYMn*%zqlid-1VqucWiGWGn=ar9yB^YF^%n zJ$)h)csHBFKYQGe_Iyyxm(P+U{Sb%GSHBSP!x|1B^5;o%mZzdY^wRL7FV?P3$j8TV z_&1_x-1za09NW{UsF82m(an>jZQsMO+0&rbzpud*T`uY1Mvf%JytjaWOzi{uOqLhw?Q4gp|ju$`MM zzb1-js8HampHgg73>Chdj+XRj%o+v5T1q>r>7qi=J&vlmItXUj(bVNW2x{%9`j@$k z?Wf}W{))h z(2{0T%|e{D%!#1dTm_s_SH;Qs9<|t{m2EewIf;{2;hZJS9B2W!)-%;K(+!1CS%y(< zAs(!Xj-^_eI9u*#yOSf;z2&6SXr5HxmNB?CsXktdsJYsJVdEP^GffcLCO3+fjX-|t zImXbkSKbJw`%+B^9#^l}5x(W_RC^UqvQ;_Otl9dUv(xOdmZ^Mt%C;|;kv9=?qV8`Y zgQFFWXcRleOrjlX0nq#wgpmj6;CZS!rhw)8k*P!zt^(+<6U`iiWE(iDsdkAaf_aS7 zTX=3mpAahhWCH4Fn8N2#?Mp;%6ynG*(7uggxd@^OR57se3(lNG=1pPe6B z$=#lFrD`Mew`Kv~sip)?+1#gyXq9yssXZXhdX* zazJkz&xO#6f%O61ySRjwh<0sv&X;N(@yK>n*;MC)EVa){q`KJ%cJKGf6LffRaOnnGX@8+3T z<8a-+ds0mXs;s-Cea&ol|5Q@F!UUkF*Qt6kGTI{}76Y7t0ro6DWhh@j5=W`V4ySu2 zXDie!3N)2yxL&@Dnf^$yx5A!i-g5xC3}>M|7$@U{srDiIPVXW{rDVT7Oa~T66XpdB z>eG7Zqm=4+yOcq3UZ04h_mtfoX%FuB&z-vNH$c+WF`?r_CAZq2;(58l|Mt7V$Id3RhKzHpX$X& zQB5}5c$%7l+mVs!#vH3GDG1NVk|$I37b8HK>l02`>;kmajcRj{r@46yHxI#i5gD{H z6SvKWe^F2^pfX~8GZeXA#ZtIU<*}|HNmkkE(E<5 zz#<*h&PLrW%FLze-{{YA;asXhb6X)(f55XASE>}gVFohoCS+rYQW4H-2`0Fe(Oj}h zTh7U1eUdVV%I=`qlk)8Cwal}tEo?}G&m7T}7|Es;S(EB@ zCZyWKfoL9zJE}CaNOYgo=K_ct{@XgkHRL7L8lb1GDGi~y;`&-vP}Dnd*V>esG_xC8 zE?L25XJGM34o{>~AS9P@K2(#4$m<;wnWrKD>$A9?q#C8%@V$gpXe5zRc$z;1>1gAs z`c(53m9jCDV-?g2dDv8Vj%u@z*5>3gT4ahEOpVEYb#n*_c*D!88_S3`LN3$R ztHb!(e_!5$H4P}w5o$-&Z;b$Im|v5oj+EvQR%sY&_GYd=srcNEO&&e|k!q_jM31Sx zh&C8npkw#DQSEx9bX?_QtYJnt6~_@xkvJ6+tJE~9C4*dljKzVr8n+)Wnn_$kLKu|O z`#ddsh?6H2tal7S1)a!ECF-ZZ)5(6zpz8zZz7s8RL?KUB22+hQf~Nw#Y35tpe=1%@ z)dSJ3PNj328pk^w=0jzfO%Y6GHU9+K>vTz!5~{~Tr46ZWDuWyd(`SOql$tn7)_FiFOwT(Ybiem}+Cu-OeXZ zAfi^!SE`J|hYpjCa4;mAGDN&!$vUo*G1%W+^kJlLqCa0u&7xZ4I)I_^=Mrh=M?CRT zL6Sn}%F4S~4ljp$C|`&q6o3rF5iajW8l_d2un6 z2?f))vnr{!2#>jwVQ;C;w3by*rlfj#bFzC2^Jg70b4RzDh(V|eV-432jM%$Mu8^ul zJKZbQsVbukWY2AlNwuw##LTTlK7C@&n1_eo&nqNF&wDYbaC3cj_ZSD0g*o2FW%I0L z)h`T5@=EOT3M;TBW5gdU=i1e%@S!HS<`(nFlHGV3Ay1EDd}|OM<@!nHe^hBtwD5mg zFf6iW<_bim6;;{SbT^Of*bN{RC3#%JE+k?f@r1H`Cf2|w!KqYMV1i(|9mv3-r-n{u zI+kv$+fr}_8PhtrJZGDdkY()t%aWSEG~{_ie2n;Ss~TLp~0UQ^1(FHFaV}sx_Uw! 
zu-TVGxHbG^=)7P3cZsS`?x+yMwVXcL>68C;7@~4mp8Og79taL)XVDPs9-nHiH|06oD z=JxQ@OG#zDFmL_zkE2rm`70Bonwxmwv1?@PgR&4X-6|{!HEqF(jYZ zbx)IvJUC`Wsb(-WRc|&S<9K_#xLSoVi_p=HQK@{m_`2sr0Z0~DcCIqeb2ef zfqh-LsS4dzD$}~+HkW7?k=Su5Tq->A^2xTj+H=`78}CpGH(d^!X%fRx@&UIroqydp z$%m75TQB1>&m(E~0G3yrk$d;BC`X0A#ssav;rZgVt?Z%x)54IP0y- zrrK)AvkGr5TrOv_E>h9HA@@?H{?AA@j%VI-y%wpqu_DzsS=*X{9_6Ftqp2(bl7ymD z9BTl?sOePcD!nW!I@>F0>NTA0?CDKMk2i#Uog+Da@*{-x84BNnoDI#iKt1{8MN!>h z6r+o06e)p!&_mH>YYVF0h{ts)U}Zym2vT&dVj=hm3Hy6j)y#HdSsUCw5C~v z-LDuJJOc_p-ZK<~vXnK*gU&G$n^62gtTok^qtM{A*@7Bk0sYnhQ({PlDb@r9HN;grr6#3h)6Uqqp z_>=d+VinRB@0qxy$IKi4e*UR#qda&1FJ8K;N!^E}ozP&rXckv*)D@!JP<7?0RE-cGmqgV< zZGVInuq(_n47c4(MMF@jLv9)oMJPcbW|Jck+I_fRCfldSzLw`lmaL~igSX!)!#Tb< zrxHoaStaWW$8Mntdh*j`xFtr)np9%|`jhGYzQpTeTM*R@@7j+3S8YO=H{k6s_9a=^ z%SRW6LaZ?```@tropW`Bl54SPdP&3tBoSI>s@rA|D_c4+|2*N(60yyR&X=KpKd37_ z(7CL6p)5?l?oe-G=k#2wbfUGt_-6i+$ZM2&DWCFt?u7r7eZK9Tw59p-ggb_~$jWGr zp9|P?YXR@s0&id{)pAw%aV=`{2;<{UG4tX z%eozPo@#t&uYVPQdoQdmhd|G5Q~JW*RNzsbN(#P7A@t+Xc4zz*a=`u)wF_Ak({v!< zX)a97VR&dREAslXv$oGc^msDWNowIjXZ0IcO~7XYsV3CPt2d*RR?cCly5FFEu+lSs zLH>Wq=tx*^-b~>CUu1NC#{ZGgXMdr*i8Jv$evUG1@Zb9NJwr-=L_-TjUqho+#OmDd zTsZxblAw)#SYu@P2iD)8q8Yg8%!v0Nf_o)s21qHba8eH0dr z`RXd&VOwKF=K;XETeZyc_}}P+Mvb9)OwM7|a@TO$&m6yOTMsG!cp#?o;kS^}O%<2C z@-RJ^?>gUQRvduMXfsbTR*lSHS4XG{K{MBut1i{KOONkhjim zeQr~0bejz2pJ@s5gG<2CG!TtY4!3JN7@q&3O1Qv{*IHKB9ho3E^TKAoMqb`5!$TWW z=kQ!b*`A2kx|T@A~<)YuYqZ2E$zZf7$fGTs@p}4aLA@*lUw*%q!=$FNR)n{NT&{ia* zk<917{Gt<0tI`LkqjB-qLBsZlbt%$kiS&%mlQ}5Q>$t*uc>Ni3-$_CELBb~iGt#Tl zaIwr_f@FDlju^A*bC;WaD#E2cyI;EYUj?tL{g;>Wn=r(2S#N558}a!48Iddv@C_uxUpQ$SUZ)(Dl?SxZZtU;y3aNz#_>4A(thK;v1y_rm%SnG>gii~i}v zbmqO5b^WQ8>I8b&xe1!>^}yuUJ2*t65P{2gBS8dyW6byl$n&? z!2SEfU{-7$Nbt)#09#yFIj?! zr8kBcourW&TYQ%*;A`!U|ANkj>zsLbtLF52r>o6^-vv9QMQ6iEQS1LzH>4NT)17J& z%^O8U63BMhfQoN%Ts!$THv^I+d2r5zW_+{^O{R3wUzh&Y#X8#ZY8%@Gz1hrC#pQLn zRXOF`{Ez{oQQ7dN#kjMY>QLVzx_G7d31X7OZ1>09k_1sy@6)oM?fJLe$aJqVTTZM* zvXXY17ezGvC3pKX`LwCY0O9CGehZpBcrNsTG5G~1II>@AWLAX})4oZx*qRL~fVp#r6fpUz+lp!rdq$ z*wGlj&|a>k<@EjWl~HW@&AD3s_Os$Ot{yPv{xg>tr&pzRWwp`=K!NNHfo*~xPPs%I zP1J&w@2_Vm=H$`-koTDM5!L%gKq&ot)xEF5YLO&E9t4t zn}6}Q7?yd?LbK~x422B2r0&I5>Krn7LQT`0d64c1EWKI@FNa|0n**ttKes%1>;x`U zKcEV1yKcyAhL8S-veH*OA2?DJbh`b_OKO`%c4dJ!kjpzL`qa1+%B*dU*|~_U4z5rp zs6k62LeGoQur(>(jwn#umKH1h^Y5P7@ofP;x46=P)dqW@k?;&8ozyqXXq1QH` zUNKlqMbkpGEpr#x{l)K$FHo7_YsVkRok_er5!1!}S*TnwVPPhLuH0MQ^UpsG zPPAFMsU&l$0|VlRcRa}H)3kIbf5idJleuw;qSC8?yDxRBE3-)2WSbw#4Yg5s3ecp# zrg9*5+^f@KAkaeh87x2sG0z>VIL2Q9> zto!jr`B@eAaUYV^f%H2-q9&4|9jv%cIJ|3H=TCq%#)s;E;u@a@+TLBcf5p0RMcRj7 zP+lJm3rZ1tAuU*TssS+H0Y3Md9%0;P&gcGGTAp50w}S`5VDU(;#nf;7-LJ0JU9U&A zKOXEnoaR%vZ4@%_)vx3fZi}h>js3#fJ{=KMNB3+NH=Dl@e}h892mp#NavnkTC_9$0Zi!bR}?@FqJ1uf8Xt8WTG11q(Acq}pgbaBGMW zoFB-F?Bj+6So<}X1;f%6isGKP3zR{@SG=C4MnIAcJ4Y=;4g+2(#&&|?Ys?x)>OFUp zQh1}a2|9NdRqD{V?#V^gyOSyHLYC(J1L5&x3aE7Rw$j(%)11FexIfzIQ$+LTcM|+I z@cn}U%-^FJ{PD;4_ZzLynf>P-B_dwyWtA>HBc3P zxq4?rCuGOLhE%X4qa2Ixx`jgoiY)r96HQqDts#2J zhbd=e6Xgeyfkxz6SQ4M_)8$(r5h$I-`!$Mt#i`hDr<_c|p}qg{vL-1mJ-<_M%{}>c zXvQ6&UM?k<5{Vl8)KCwk1Cy$jH`jXrCKE5^I&*)2Ri!~uUbry%4^K=)Km{vR9K(b; zHbCJ(@wi(wZOH4fr}HKwb(Z*yQ2l0e!GwD6ho&P|sY=NR(cf7ptMKy6>#*QBvbxO% zbcR8&eKyDP2R$oPP4okQEHfdSgt6|TxSoOrg#c*n31D*%=emQYgWOZXUKk1G#BEqNwVViOjFP0ZoS0HjWjPO{NTa5I zW|9Dh9Brpp#7ms9#4>kDvz<#T>S#_lJSYX6vP;>NT$4k~D+ZgFN`;D^#pK3sZw&qWzEPda-vn~WrJHgRbBOE zQ2mYgBc@eWH8^dkAKupRz)uPzD+xZP1T+n^wes~BbuCa%Mx-47Y$jOBT*|BBJ6_*O zwv3CEr+>&_Ww6=LTvkDEP?+T8vnX8MptRcm#>VB|DvX=5Ku*&$X706Sj~O-24|86S z(Stk*B+2ot2uW~`vH)^)&&_*vzSKPW3kn8JvlaU|R9gNg=Vx(QPn$aH 
zM@!3{Pn8hP!1uJHm8h1F7FyB1t#KuVLKB7lB5mjnRZTNbbiV9zsnpFAN+aadl;TNoFzoH(#=tf>2rLr6YkU<2j72nH0229@k@}Hi3 zc;pdr!m5C%_I>W?PZK+_w$MiSQgEEvKwa5v_?&iBrx1~#^j7@V zk>I&#J)5R1GE+bpV*^-$&kpbVc{jm zsQt9?*p*dr9YqRr``J1d{a86N>;PnPEZhYOJ6p+usVntlhW3(JHE{V0-w9v_1gkWu z=ga0OQ}aeTGcoP@k2p6bbp7+l322I?jN1N3V(_zCucTZ@!7f>0bpz*}fpe2$7Cmnm zRUQTNwq$cVCQS0)x!PKe`e|TX0h>M4Q_R3S3szDdU;pvejVN@ykYV9t_{MmeU}!N- za>nr%9q%BD`aEM*2>(<_pIu`$gk@{S;Zu5S;TS)u2G8IST19NNJ##$}XBv;_{>Ik< z1&G@Cp@UAW^D!@VU{7ue2C7Bq*do{#X~Qhkvdr3dVQ9x52el&dVg%vyT$S?KsE+0nL`2A-i_SeNicc@4TMhmA?%=$EoQO`b%7%G#Uo` z&Cl_pigW%za`Y3GJ&4cBOV_!8kKodiUDwz@6`Z74Ad?$xqVkad(kZ#6@YxykQ?c#B z;MXfDc0rtlw?=d!bt@<#&>OhzZ^6+umWrLqtfCIo{ApdB@gcethsXn6L5V;$zvvqG z5Hm@rW36;tDd{3$utWU9rZ}K;qITdLH?tDr0N8lHkAD>K=Koa?WRc#yae2MaUbEG# zT^;OQfp)BB?&d20lMs4dBi5Ym`k7^K<0UeGC2dey$L@6Ay=(`h<)NdK3Z1L}CKBaS z^gB&8LZ@6M7ilNK^>>yof7~|A@4r;LeTOW+eV)rN;zrl@IQ{e$CU&wCheAn8DI^r2 zyWQa^?HliCY-2oZ`FONCcsqI&Wcx;bpK$;r!ug)r-(uIC;dv zLG=Ck-COXMIvF!?7?+F)Yvd0Sju+(>!SKyLfp@#tdNP4B7N zV%mBVwGE$mqp^@kORy+yV5k(#j+dn78F24HS&sggrjSF2AwaR@5lYvUD^rE%580wI?(k!kf( z5LsPcFwtS}7UODpa70e#ySt#YMm1sURQgitgYG`;@%$f@#tJKIT_MdpD}yBxu(Q*A zV^wKM2KGM9bXl=k+Zoqv_|;eDR~)NrMdplB--CQV+g%*#;-LTGoUg_w?mA)esn93x zHi=v0IQN!2nZo(^@gDwNl;pR@ZB9e-y)T?E!yTN{_sgA#@^8u+*I@wLAM$H13m-?< zqith16dR#8?Nt_kCHK3^#%g`s2)C_Ub-paLgfz(Lm{sPS@*W$;x1_w|Gxvm?NxOm; zy1KN;=KBkiW=$$ZuUI;?3d&9zRc@EJ5=1CUmu`!M>~E^s-AK5tNb#OwO*ltxWBf9b zmZP(zYd*^;&A-T|h1~Lf=QfNw3pWhU@O*9bHgSUyb0CTr_by1+teEeQKaP;;bWF0X znY*X0i!BPRq+A2)>J+l}^%EqyJI@Z`C0+v6@Yj=6^4>p%?)&ceQ5XSyn6GO6ap=GM ziD>ofxwIp|)#(ik=3G>i&pr6*bs!$lI)cPD-zqbmWZ@Nd_Fzz_KeQ!Rr!e|&gWkq) zgJZQObt$;!IW4lG`vfNDmqo`iWPZ}<-5FW67iM+sp6)V+<9-`E&uBqrYl^=QkZdG_N*s}Kc zbb;mDvsn)SmErCITAIb>7IkpejQNK@t_KsigNNT1So`$_mAgCL@>khg!4E%K6xS0g zZx;^|i~!ZPIzEk_Gue`uGg;xMg97N4_0ZPOz!Kr=TYD1PAs52qgs*PilRjBQ;p$g) z{X$(9$a`Jp+{Z76f0Z$m=YgGc3N4!#NmBz3 zb}{pgE9T@k8R13yTOLKxh8(isTrmO=gxh5{^Qhaw$$6L5%*1E1S__RN_cv>F^7Vz0 z;HXxzHjYuZ@{nM0J^6D(dDi;DK<8fEQtW)baZ9}q0z%Q{&oCpQs`}+Ct$=V69IPa= zWCvbX_Jq)mc#D8;oNt4@d+KswDj2u!$y20hA~pe6Ca(kR+!H<&Q?qcMof1np_TObs zuiHNF7rH(RX$*>$y(i2gwootV)2L&*zvWNSs{g!rbUqv-OV|w#Mpf5A-<^vvop+?O z2r76xdDe74vLqrQGVbp=s9(!A`$>n}K=E&p$B661Gwwmqku45jzr_o@T#546bOEz+ z3XrzUl~xeCHvE&5^ZF|druEhJa1FV)`rYkxvtAg&Bk5LgE?f$pp2IIh#@3Uu4Yh!{ zHmeJkl?!W%G-ISQ5SItj(O>p6q7C0Mx^`T+6ts45F|2xf z;^e^}uQYFFs;dLYsE*>f?P`*ihi9AR9r|WpA4owEEZzzk@m&@T07zL0xeGYw@!s** zX>qp2Dpoue$*c~mTI4#STwR3!*stTU3d3ss_c=PoGmSKMBJK zaN$H{+?cnnj5t02-^!sTTa$^x=Q#S?6r1lW+}L6C#RXV2<)WWS^K2+TnaD`Ynl$>N z4vEc{-(hbLi+`w$<4OVbjAqf*Ie|W<#19;P>Ae}ol!ktJhQYjfbcD4EYcJafZ|VS3 zL(+h>YGrg-3W#Pw`J{G@_S{D33)OhTLzx z@{VM+$^rz;dAw$9=nG$d=?*iS`uUnyXYhsfqnw(*si*5QU)ieFo5gjDhLJkz+n}6# zkJiT@qLBFQJ&=N3hy5A6d+YaMlhW`Iv>@1=54_W_aCLCaj`>Ev7B+$$eBGIck;p{B zQ=8q1rUx;LM65j&vba#FOoG~z?ay?~AUU@pmCBBSitBlW%KoK!ui<0>uV``doLXg8 z66%fIT4ft(f1DLoF}b;`N6Omc+havt!p*N!1+8GQwnI$4qN(e!|H)Y{`^xD`eO7C#vH-KT#jK zGs1k4+fl=NtrtWSX7ed*-0GTu{E(4Cpt9#;+4O#y6Vcm}$}wYLLQ@gEAWJk zn<;|$97;C$JOKgU$t6;zwlfsU=X^Cj~_a@_y$@P!+ zVP&H-;EA?!;N6T$H6F;Opm^~vRb zx){_g=bBz^pjV>|1@Qk!KJRt-^H!&Kcy3}C()sQ zs`dg!&)6#`ET2a#uXa?km+a)UN<%@>Eies?Q{55JM<_@dJ$Wz;_Rq*@n?*PnzOd1z zhu=XG-?MNGOw=KR^Xg(G?C`~8QQBp-)wUf$>wi|F=($vSyY7FyN!bxRY&Uyn0mO(G zVtEpRx>W6X-Yd=wwmLe9=8D^;6bAfoa`6pr<9~KnRK5@QDGI?;VH+uk{j!eX@_G(u z3igIfW^pOW$TmrEZ_WD^SUky<;ZufYfjhiHR_&&=hCb@Sn9YUP2VrlAy70l|j$O_z zQ7#keBNUho@2hq~Z}3gbTkErvpv8Iao;E@v)4uSoO9N*9q^^Ty?c5(jsbnlpQmYG8 zf9xyM)rIf8wa3eW;%ErsPEd&yXRZh$GlEdQrF0IYQ~tR{fgtl)$wo__&Fzs&|E#AJ z6bqZ%h^;$1m)fAp>n!a_Kewx5U+Fx~e0G#2SUS%Hv;HdptsfMC9Hyn_^u_`KvT-6W_Y|FO*; z-*;clsw 
zPznnlUV&O`I<&Qld#*(|A0DSQSx1!cHH}sWGcCA%)^}9*7k5|YKPwHXOyA9vkHwkQ znC3J5F|F1D%)TfvYg&EieBM$Qxt}sZgjaw@cux@0xoZ|U;Yd%b-pjGALu%|iPeoQf zody^75%U2}YI>`corJ99_?$7A=-;S_D^D~@e z2(pEcMgbT~?)j3sS5odbjzIa@U1|}YlxdC=Q@lVj)}qP7EReU|5(1x1`C%4qq4qSI z1At)#)$3?(J~6m%0n>Ci%z#!BVZ@;U(cxY z%qXf^l8GLhOeelS@NFRPwrr94;J0G5W^L}{BORFOzYh_DySZdSf%Cm&wyC?*WLNN0 zGt$aME+C8K#&beh9;1-Qaj!sYdtR=~20*xWzE3J0Pe&q*qf&na)Kg2 zn*M7W#IcpVs^*bkp}nHU=@=Pq3>r%3lzld5(`O%x64XhXRCPo>*6S81QK^OXP?*$? zG?q&0Ck=KezIxPl0n49?6`yJ=cKjwuYB5X2woY#`E5W{yU#ai?t}>0oiQt)-qA1PA7r2x;$0olWEwx^Nq_T zTy9}mr3mu}d!mY1SIGmzF-VlATV~4-5c3wJXGttAN6!RtnMKX#edr_0 z-}f3n6Hy1-Aun?G)va@GDb3d6Mc3p=&<9$Va6@HosMQ_uh9c3ogG?rq+$rN7Nw`LZ@MG)b$*k4r@=HG>)a(^ z({Z1Nzg%=iG!8rm#NA3@Z5uOUn-x3lp{Y~zUCISu=xSnzZ7)%~9GfNS9XVNut60Jt z%Q3Bd)G6RI?8(WnF0*V$E%{qkEw&glruYrjNTxY8ODjp@z0@udXTw+obylJf6~{mf zE=l5)4I6$n{KSrbz)_4@AfwCggBgdZ|#i4&s{waGkr#bab2UR7yV zd^(c`@Uin~?L$sDpNTxdUWOIze^pq{BAMF_JCcv98UvLc56`^2KAw=77L#Zq5` zx6F0tEw_>f2;X}|{hB^u8BaSKdfAh971%{fY3mkP1%bh_Oy2 za>z|FlURVyhyd|<UcTU2TVjs#~nIeL^JMYE**L*ri7O_|5~J&hxZBn^#*QD$`$ zzwyPW@%#%}pV<90b`*!kh)55v&#KZ9i+ThBh2WBzX3+9jm(aL_Bf4GTOkSa+W^{mR z2G|E2e%@U)PBMk(L0rW_EC>oXtMe{)SFGfu_0^YLQ+-N?#h|u!Fs78FI_6cP=Bf5t z+U4%r^~1)X9<(3)Y)_~;iH`(k(1?k7C*j#y#K3l^l%>0z%FCZc?d5>gxf8`AfJQK; z8=PwWmoQ~b>#@!L+@?3KsSFf^GnJ(pZC)HV*=h|ubM1tzc z^Lf1-^;|Wyg4m=V;HnX;sqv02ma7VUQkp9Ob)-?<(RUD<({sowjU<_aOcZ9$1q*+1 zWOnVUO2svM!(>n+QI5pCQqJhXEug3X;TC}NntIUU5wm-k{oIpk)7;>c{d{&Ob#*2Y zW0_N_)8klcd8<3SshPnmY1Dn4&^p+!?Bk=q4auG{E99(CH}oE0WxJX0xj3X>@rD{* z+p}=HUDSaYXFZjZ+~TeNFkrfs)w5N*jOFBI%-yM8-Ol%Tj?f11ViNdWv#r}s?FGi7 z**7{pk=eIB7dKvg+Cdzjco$P&&R6h)E*)FWB5*T$RvU+^|9;VX*=Cz8HBHuybM59D zXsqypkyF_>a%V3|P|b6X)XNT>sk8e5L(4Y4ACUBoAKfNZ+crDN{W1upitQcO7#kO1 zFi9r&|2yv-RrN@7W+-dfI#HM9KILIjYhaC0JR31BTDcUMUKQj!C)zUgM~{~`Vd|eK zm_3k=n3$fOWD(DS=bKI^V0&@UAkG8By4-2Cy8fRA)sMNKga!hRXE0l3t#|uw8f!Ci zqB&Ed#uQrb@Wf8Bch>0LnvPkU#;b`FcRO`741iLCL>?u42X<1k+jjcF^ZCIO6yZdZ z30Ac|IQU`E!7(YN2uamnyd+=1 z8;AW?Q_2J+hB!Iro`)7yq&Jn~`!`Ox{M24GnW{E(qjFiJ!{gpMdv@fSx!?QjwzJnV zp+*x|36tDYJ^H`@o6%C`(AS0ozK+A$*eEb!%t{yb@ddQpfo-FcGIHOhqjJu}p%1WX zaj z0lA1znMGXt^g|$Lml%lE=58n?4i5pe0lUKu?jn{2IRziw_0FUSQO!Y%TE!U{@i0b`$hvE%(l^7u)G$0*!aT&U}Uqvl&P$_GujGSV}H+qa{|Y^ zwfWM-3kOC);6T0m<8b_4+YGIcy}kFH&kl`P{Ge>iJ|r zOp7)R5gc)IHPUkoZ~a6A5oR;rQ=(@poy9dK0@DcCIP_?o%X}~- z3}S~$f3b&4YuSQxImcaj54OqSLs88l!ZXA;9Ad}H_US-x{8!8~j*`bzRp~fPcZ=~) z^lXsK=b&P8P4DnF$Q7)=&)&SJT6S+yEJEohz+ zZ{wFq`5u1ae~EcMo<18aI5e3gOyE6R z@C|xY?>el(2mz|B7?z%Lou?M(dY1OV-x^}E{;W-{%oX!=Lrc3JL5TKUF%ZPojF#id z?tmr8+bzAIp+y(Cuta;?I#+eDw$VlBCc?)S0{?}TC*JeJ$j|@ol%ph`HGBPfj>$G$ zcn@=3jc(SmqnoD(Q?}b7fIG&fiTTXzS+G7z3B7d){wn1W8fDMD)cyS^FzeZ;B}_T0 zzn6J&#Lg}I6zmtGn6^>ATGrZHZH-H|!-B~hHF1-cT5GKazi*VEb~44js&O!U=BgxG7P|O;kbPo%f1Ao4tnnpB8nW#(-opIAqd)g>bD>tR1v9@! ze>>CX(&aPxufOcpW}DjA^7jj$GS!SS(aWOCQ`dy=L@AgR^WP-#J?FCR&M+Mpr(~zr z{y0zR*~r2}z;()fY+?F|b8-~eta^s|JOj4b$(TUQ|DR19E&hM+m=fYmQ?~fBYj^PY z$Y&+q^kQB9_cVd18BremWMQ2$C*Xv0+gRLXuB!yV{_iUG&jto4%6!4w@ARb7@+u*~ zN2i8^g7~oR9`LH;3nurHt6qABD|~y~NXjw(Y0P8Tm+GQWT$3;e4@1Z5J-~T5KU^b! 
z7X!4240Nsk)fE0DY*7sHy_;9znEzB<9#E?qlrA{@p31^S9+ZJ;2d!*UJ^$OPv3$E` zkqPnb-&i@#D7LWt_tk}D`b-JJ`704a$NK%5>o6r%!eN4i)MkCpDPE-xwse&dSXLF1 z?0*A2mOYLwZhbnLmnKfaCz2I>Jo&qjY)L3f_jIDNkQ`Q7EX^(QFncU}8dDs{viB&h zZXDRhG(OB}>B_2Aq6lj|Fl2ivNxU|fYf(=d%8))m#-yXnYy%*J<}sCrtOh1L>O9o`E_L6%7jgzk;)yar^9IDT|g zJi*35YwFj(dYY;Z)mlCs*}V^g^YG59UFD^GXS}#RVTIZS#-p^;(T@f+`bSZ;hmeKQLZEJ8F4ayZii_3a`x-jR*^u{Vfrq?PG+q zQG^+GacAy5z`S`0EmH0L$nTTZl=nJ{_MVwzR>-%U3y3AJ)=Y6Gc2zKY_=@Mnj``4=O0b0IF~Drmij5@6||BrE4C;W#ah!K zwECktop~7S?_kODqMx>pr46W+afI+FwF#K8HK%mz=^0M-FRrZ-xl9mFu&Y3(=*xls@BJn(fi6Zt%n_xvU|J^Qy^|hLM4}1JO%bfh+;=h;NM@Adv$=ct2 z@OYtI00NVZ$MZ?a)vpV``7DdxZtFp*K6ueT-bmelE2CYn1v@>r+y84=dv?^!(mt7~6{hdEx^^2w}&#ieF~`B1z030F$_i z1T&OMtfBT#f;MT&yV@==sr%mKTH)dPLP)mw2N$LLNG^5J><$&cNs102&TN~Z@$tpT z;)9*qU1K|sO!MNythOTGs+)lY` zrIGBGGZbp8Ga6M5t6OyuqJUGBR=;8%-KiFqunukoA+s#@%WVf&EsMF2@@0(oYB8-$ z2^Xxd49~lvN%U;nrDenRjLGpnVc!-ZAnoVuGl<^Q59{P~raG%4^dDvC2lwl*zi4wI zTTTJ8?`8b~HcL8LrE>FLT)qp6Pu5{fGJ{7>fc@oMFsO>Zd}h>Vjqf`&MDZk_Sm3DO zvwFMN`uOZE9p}s9fxnA%5s0lNR3a63>uCS?q58pbdZH~+K*iMs`>|zQ-IsRB~SH3 z#)^68%ML}p1AU%>_+xJX8-+(fL6;K|xud67BLoHdz-XiApa@=GxQ;@EZ2*}Vi?P48 zZnxj-q7fkk3!u!WNIJF7{-7#E54pvN(JGehi_d-3vQ$ymKi>Sys)JA;Ukij}nA4V1 zSfc+W;*DMQG$n6Ec65(jk$D&DoRoI_)wzN z)GPe}e@tjf{N#BRX&d7AYMu=_zQ^v&PY4%hK~LvpDIkpNRTgx&UpP9$Q6F*slfN5d z71Se7#52(NuesHM_Ol_HA#REHp?>+j`{ITos;nvI`#)$@vRNVIi}SIWUwZ&-k{)jj z$UVjIiG%WPras5IK8cb5Z)Q%Hv39b8IJh7UI=bP>vscb+MBlQD>l|Sg?V6vP7A7o< z(D=p=q)zlmM}jMUgd+dmqQpBh36ocE3npS!GZEqR)eaRNyKtH_KGzFtE!T(yNGf5V z!SiQnCEaY>eMF!dwipi9Jt*k|%~nMvzgP64hWI9W4Hl!fO;o#-5rQGNSkGY>FeFu> zFRTNiUH_z`x?~t~NvQ|?Vao9IUiKeR(YqxsLZQjURtX{|b+|@b)LQ~jw_~OO-p*)^J zh$bs*l1_Q7xndanw0K>EZLL{&(5}?N<|Y4vCk8wP;aguZYj4zM50& zY%;b1_Tp8VfgI1!i|oskh!q>X|90lub4ZWaW@an0g;bu+<+E&>INWGaZY6YJ0(XfQ zhux~^TZYfU<@UCs+=fwf+hTaF3TGB0(TiKxU4 zcWFaiH$I@caoZ-Xl9B&U|B7QW#6#hGl}2_ZKZqAjo~^A+5g?c7mpN+}!X5poL02MW zoN_v0+XOojKf2u36g#dI|EmxSr5?_6P+32_uGd4#yxrl89@DCI`$w&Xa_MI%V>1`i z`GNpuBx*t8jk=>FHr^V_ZLz6Q+>u%jl0ZG#qnV z$0mww*Kr)Ot07-)#;4v8!7@>2yNy4t>wH_rqqqASJS)=P<+80wlp?=r(E63bIu#ZD z*0p4nn~9z|DwkkrI*}-LpQh~wzxtn36%z?l3k257HeQ0)ebF{%Woqh zqQ3y&ue zCkoVMn7!u-4C0y2zUpAxUuobayiK?#i2za54*-vl?Ip59k-dc3bu2XfW)=KmUu{;u z1YXi(2K;vw=wV6n=P zMA^Ke1}#xPBco?f+Buzl)103@mp+{Rve0II*~`s0g4go?dk>xcR;W}hupkP!ztdLvLPAPNc!|CCl`PNXL&g>bH=}wT%|B(y+C)#Lp~N-{ zWnvcJ;&lT6?$?H03%o<`*0DSF5BLU+seZQTX30?jDe%?JG3DYPO6}SSAsX~fd+;IZ zv9ek^G4_v`RZ;+Qzi<{{@|5*Qif_9UmF~;Nah(##se0T4?yF-gn(d)epygE_l1l+9 zLpteVNa}xjVBE)jP%rmjYNE%%E2}>ly@rbDkZ;*c*i5K6>BZ%Hr##=Y zb;6Vq&Iao@U1)>RLRMTztB9A{eN$>YEw=qJiq^>LRO@qieu&{20N}ltm$8h+gqPpt-n;jZ z`LutgB6qnI;5p$w<6F;lbj(;_O|0_)btsa{Ht2G}hX|P2)SaSR&?D-3+5M|B>Wo6d zZ7<4{czoKYzEAn@gsfRabL^PxNv$S-(ME*eTgbB~m!&0N46r(6I3#D(DWt9toOjvL zy?ZK-A%Ujb{{^+B{mZ!fZ#SVO`_T<`Yp&QL`2`=a$TlFf4X;a1AGi!<(-8Qa&T_tM zE!)e@n7d|$uNfaaVS8DCbB#$Gq$!R7aXtehPQw3$%VS@v_oUu%o) zVcky9Uv!+N>M$_spyu#fq$lp%DeSMm!yhg%I;wsdDUw*YkaIDnk*Qzy;srVN`{eUd z*1eVgMiQ)EP<0@9=@#Jbp@&$Inwte__yXC^Gy03LsA#S!2+M) zSG=coC+2=@N0@rUCRgoV+QN3hXY~V_m)2C^Hm@tOb=w6KyqiL(9tT)wZjkE#G8|g-x5u`hZ?iP?xx0n4wb;kS-A=N2KcxdcJe?eBS5Y z&9mo^{p|hQ@4Mbro7uCLL#k_0WuN)XvDPP!52Dg?AH)nh35rOz33PyfH)jc{mlUd( z@fDGAy6J~SKGW*QeDE$iWCs4KbGDdy>+oe_$?@Td^|oU?kvs_~ggx!B z^cxvZQT+PC^}#hPP){UV+agB9&g4Ez6kUr`fvLh8-AzB`s-d>|WWo*Xyg2s;yPKFql8AUyq2;MN#sI^tikRV;fBqd zwO+5kIk+9q224mfMW=n^c24%8f8xTiK-EoGRqzWR-pQ4O;T_|O4};Gy#_;-PlZQ0{ z!LUC89_y*i3* zA^x8Sd5K7cZP%{}E8Cou#BIN)x{i|9H93+yO};Q1wKzY4d`+3sy*x5#Zs_EEt}^v@ za8lFjuybtTM0;B@U|s5A8R=HcTZqD4-G;zT%@q~Y5K#2#aRkyR?Zd{&&v6ouKFq&! 
zYj6NPM~xJxCL4}iMm^$Nm4Fnyt+0HaaMCGWHh{HVd8nI21qyy|8?#i)l=HHr{L}@r z&B@!wFPLDgj_UBuDUUnORnypOq?vIT>COXvLAn`0n5$vDn-}MxTRl=+&h=;ozCq#D zelJe3Lyd?$<|LyP^~!9oSgvdDg_OXS;Qvv!u8|9tcFzIaM#lwO|X8;)cw(=~dA=J}l!5>6lC;a3M($ z_%>aM)FE??bQTj&p8D0>Z^tRu!nlNu>NiU9Eb3ZHjAU2Lvwl1|JVhDcnSdQ{|2ShZ z*|Q_8HH&~uu4ztckGBN}DnnPclW(g`J0Kj)(XQCR&59izWyd#u;s>vgOf4{9ncB0u z9!ME_=XyWdz81V&pD|WrQ&cL>*W*I!G~7c~YBS8#uLr$($6xzweHdH$ z!O6P_q?i;(^P}F$(GC+RkEBaXnQ%vxiA%f3wO;)Y9aHuUBIgp;r~(D9aa zBK{!YTVStKSdOMMdLKJy@5903#ABpuh8?*%TYKifny(xDIbk63)22t-IUo5{aIW`L zMzT#`c59>-oJUu{e=#&$24l4`PFpipexPTy2CKjcS19&DaUflvE`6*mZTGORBwTJG zMwFMBv6fKDbxyftLvq{J>z-1PeWf<0o`1}|zqFytx^*kA@SW$Mo0paKUUG|V5Lg4s z!yjvRH$3({M@c;3m)l;kD^91<+i57zYFuNMUwzJeGk!$w6j+jVZ>OYN#H%Gq-cQ1Z zD1g%UK$(d25pJHj=uWl0ZCd%f|7`NzwDS+Zb)63ucJm{mVLN$Sk@TraHGNq%<>Ff- z$yLy~u#v5H1OH*r#V1U|S_-@shQ{yluZ8m+8o=lPWRX)@k{xlbT07DtE!|Cvw{lmq z9WU*YBL}@z+J*(#{HHee%>}G<%JxEKZWm|}EEy2v382~S(mPc>@)#mAHoTPDAiR!v z5i&kS0(+L4QSN?!sb-<)Id`P*@xf?oOeZ&DTg7^;aVGRy=u=Su<3=_R7l;oH0=?g9Dv*EXWT( zRq}|@kBnoLivK#~>&<^8xN*)=4%G!n_xchY9ORoRoe0f66kJooeqANF&JpVcdXRwg zhDyc&))7Cr9fs~&`H3}7fZh9DlXE%WfVxtC;1$V-H46U)7dif-Gk09!wn78H7|IS0 zgB{DrbDeDK7-#mDuN|q24GT6d&Z$*8Zb#XtWu~p6{vj0F*VBCr)9aX)^hn6_f}?)H zmMrl-z{aPg()PJnjg3c)_i$6bp;vdOoMJ}S6<%$m39{Gn7*99cUE$w+&ec6bE>+6| z>KhLZ&A0D*2w}*$isemT7r58*t(6xay$*}zfa;M>QT*=7=P3~eRwd8-vn zoWj}AhE!D}{3`E|cUjXVw(ZldTElg3J{DDAcfuMG&_lYkeDYgMXU?5w4Kv}sCdxaf z-Yg#nkz!T9A;+p3ZLc@&)K*Oo1FQJDk#p=ArvSsNPrM!)m+gr;eaMeZ^(~r8G^{eV ziE(gP0?w6l&2}_#Eyl7?!rtEaJZ?Cx>32RsT5Jv0aedPWmPH}Bj_iteu1eou*lCoo zRTNL?xI14u#aQ3+o!L|X>|jYi_tX(uM}*c0m~48gT+p@^w7qNmWV!%U6Q@*~cOzh5 zE-ZM0?sym)iwUV{dA5C4O*9R!ei=ON5HTM;Oei8Bg;kB(YEq~&q->b`c+EAjt}B*I zN2x^Y8J%bd)~BC{qjb$O<&%ugEU!UTA7s zyjU)T0j%RXwGA!#XL&wau+d#-;{4$D-D)yCZKxYv;wsI@wqQ~dEiKV(+j>Tk-FMbF z@w=TmF`wt%T4V%~q9V;}Ib4wK`VI(I^I;MmFQgYY=8dz>DsY7I5xXFwtsCC{;V!MO z-f>60h~+ihs@jaZnvUoe=JFWNdRvwM;w@7@)V6sr6K|E{L%;IP+`P_qCqd+PeOezoLT zS2?=J_Bk#!Ffd2J>oezkn)=}!9#eqB$;ZapJsZhIk=3T^4Hdx&W$#FfMe~tXPZn`$|vai}_*yKq}Z? 
zPv5ywL6UeWsM`h zi8tI5oznr;!c#J`?R}ujsr^mise|`k9U&NQ=>+h@A5e5FNcCQo1zgCAaZX-PxfDkQ zBwdM=Y@w0F%ViGkPyuzOTPu;U_8MPe^G2iz1f3p`=XmZX`yN}VF@eyg1mCFpuQg(; zcNo&2>}a{zU-{A1LUH)Yq1c)*s750(f1VYjW9i9|&^iS>ZIZ~O?vvibIIl(XocRY5 z0kYQ>=q;}>(*QOrfdj1c6UWQ;Vp3}nfwo3;J3*)DR$19QLB?Z)cSy*AR#{@sXj52w zotB92?PQORUmKt@Ame801#G-c-%OKKj2Cbz58ZpB_;z2lk$m2EyyW%EN}tg2M4nIR znx(_$<|gTa9whJllbQsOYt9~3aHz?}_zjn`o1q0fVZPD%^vLb;NksV?hJaQ?__L=k z@10Ue(%Vs_KXoBGnq>7o;ZL-S%4*C(SC5kM(Q!6G=&*>roG)BVjiHQ?@FS3&Ip&6v}!AbW7sYY>iSd9xn*<}Vd;@=FyeJdo!v6GSsJ zot&O=C8SSVqDn}U*lQ0lE`7Bb5;8O~d*gSIF7JfmjZ)U%{o3JVC#q=b9>ju@Ho{eF ztRO1NB%4ZfJE!U^b^H#AnTHtPD7S-l4r58UkJG!JEyjIZx60R?kgv|UjcVGEa-{vU ze%_)DQl6}G-D}*V&zGF*j62sO`cDR<^mQgqF|&pVt{+B~k7wS4*FhAmR$H12dn!Lh z#8)|zlQcns48nT7f0yx!)I~d30wRG+~4O=9uK`)Zh z@5!SPsgQ^Em!HM6W5JK2;ScW``N{I0jRWN%ls)Ym32O@R+7w@powzLFTnVRWhrOMW z#=zu;G;_arpKb-noEpe9SDq_Jj#;E7 zs*g&#_*7pm0ESLXOCILN0V^yk?KhTLzN9l-)-?IAQ^$EXqZcoyD zscE6`J;hx3Jj(d}f;fBi1km_m-Z}5ptdo>5nqw^gvEEEjpZilBFXi_g$2x1xH$ql{ zCoe!ZEdZAIi<8X5SfY(3Rw(&QE+gejl*N~tq$k%iuG$v4I{bVeadCPQy=|fuv@C0F zI7UQ;?Nyy#)WwPNZi;$=tuIkl!lRC|}cFHSG(^83y%hITM* z5a(UDakqUKPs`iA*BZIY)KtsmJvt?0)nA7v9oiLh zltjdMTNFiC)0a7Xu8xS&BVo^S6~7w&Qsq*ab-t)HWL$4l;ib3Kk?o+pTm2s24R!zi zvUEf#MSK%VHru+W&s!!axo0SB+R(_(vaNWLRj?`!JNvuowE5*XXJH?~?VV?Fj3%?z zdF4d=i+a6GrJWe2kNIE2R9~OvP|X9kQ@)Dr7}vP$J*_vZ4N{)K&@ak?W`ywM(2eG-LYL%`Ks$fQy+jJODa9*aFwNnsFtq~V1>=Z>7B@v z)(gMJVpL^7{M}#_t7Qfa{SDVfNd)=iB|8C7*cq31Qzj# zl~2m+Maul^H|v@O0dsSF?@im@+W@U>ErK%mfh2`9ylwfwP7%9nNq($&HB$ZMh5_S2Wj?1cQq9GHJ>x-A zek`y6$(PL$CE$AsoBT?0KGOxtKE<4q;MZ^ZyDm)|0`ZX_t*hCaeQNW?98s9h2Phd2 zYV)DH3NhRvQzH6T$r-WN8mPKw173`^(6Qw2mPN1Fclkc{pQdR%*AA`-NH1e?N$tma zJtUi!G}EKT&5Oi%X;n>-xyd3-5zKOFl}4aHx)a-ZU(y?jvGctiRXdBLqMawxvs=MN zy9{AFvjWLs;fm8+NBF)GkwsIb6LrGtA-|%P+~M64c?vcUyQ_z&`U|d%ool@mh@_su z1hd<=kLR1?C!GgehyxYim1Nng1B^_eSKh#i=zaHZDRS~bg51JLl`zolJcdPHBE{HU zY8Ul>vug#lLb$PFL(rE2C!2!;&s5qk98;hfi!-|moDfUHaQ)a5__vD9wmJA}3}l8H zG0yYhrx=n2i?-6uWLOhKbbI(~ShJpY2*OZ?dy|Z%zjF{Upz$X~kg#@V0HfWS;YIJl zF=W-SWyId?kQVYVGg6E(P)T+*DuC6o<*Aboh||rv8ed>5Xv9?S_ZBeU)(NhzEWr+(3LiHi^w3wOEvcr&mVio0XBhD_HogOLeRd{iZ z4`FoDya-CQlpS4%0G00$jN(<2N+@aSDr-6D7&Q3GzEHGxO`|nhWj3b%AZrNtxnAPQ z#l0)DUad;D#uc(Rz6M0O<2aTFK$Bj~Cs+a#go^+qtpi>Y$Rp~|<5*@iOL~tL23j9o zqHP$_W&R`t*cz43%c(*%wqkiIKK4%Ps;bnF_yvBBdYI(OO$akH0~S*$bT~ZwK)*6d zXe*wcJd#2Y%Fr5zSBJH{g6;0bZO%1L)dRr9yNgp}_8(PnwR%#B+zlXn?1hf59?q1N zy_F9&FeBcBSox)?`0oT}f!YeA-c!%Xi?F+rrvc-?)Yw@7OuBTjeR2%C8y&mc*B7BNfQ)w+J-ChzkN*=ol4BOlm z0N@37JBeF9VB4GTYyf9yfO%HJY=fD7%&0YCXpQS(=Q2xl_JxcQW@zbV$nCxPPmYq9 z(JbzhS_n$WK$-Ils1)$pch1zhNx8Y;T9KR>3-6~PL3$VGOMfSy6(jmv{a~?5If*;! 
zU=>zrz$5i&zSJV5NDa&*s(l?G4okU4sKHd(azy0p;M1-=%J$g9ZdioR$_#^l^MnBq)?;E2ei`+bf~NlX5s>JCRRC$L5<*^QV2+U zRp5E4JdV9aB*$0b^F~0RzIh~a`+)*N2!1SnX@|k@^ioM{07+D4)V+I*sAZW6sERW8 zr%ZkIBOEvGl5r$PHuNhX-l@ZsW0l8zq!IB{dxe)BME#F|Iyw>Sa?g71w44{A9YA+m zyovVvpEA!u@(bu#9_d^UaVRF?0M?VE9t=5ppk=%w0Uy0RfY`; z+GYKq_1gO)YUB~vtzni=+|pqdl461D3#&}2di);L@=WnXcSDc#OBBRjsJ?(r!DTMjYi-A+FwC>R6)o(;>BJ zKvl7RLjL7Tulg$Dyl#WEz}P=i{cR_AF$WZ-*f-&iv5E26oH9aB6JtpF zdkly-YvXTCd@3LH0o+ECEUzC2{bhmh3AFaMnuXKgp2DrWCM!_$&$d=ByhW#~U4-^y z5Ju;bBp$%0Ba()2dkSN~UIjn6SBM`+4*3~^tUgZ&|D!j5i*Ahn#y#ZceZG#d-TieD z_FFli^p`>TF!VPKu`qEq+8(Bb_IO{MJScZ6qhA|wC_!ioFG+o3NJ(P6wHHpGdSv^V zrZQx*AL`j0Mo*_$hr^WF5$qQB!}qu%(O@+PT8J@Q{&aRh=jpDXe4&hnY9`2|x(yk& z^r9h!N+f%jaJj^$V^5U*)C zekpFdkMLm*LlVO}FljnJ`C));`p8kv=$mk0X+vo6@&{MA6K;0KsI+;ZLx34Y!eZEUB3lwIk>f44zx$=Ts>l@K7c;jsZ3kLg4{rTEdCJsL1>?0!hSpx zw6E@BjupW}2{%9KZHPC=P(WcI2!8@GN`pkL~Dd-rgoJDOc27e z4*0FMG>5y2N}|-yg|-m=Uo(>}4o-_ibx9pZ z0_WPlg3ysCw3hraIF0n5^rjT@Fzhks4>N_2qa;f1w+zZ@k&A)cK(}$ zKU0H(fUD}i{9QqSIPQ4mM)xR)p z`&fM;*L{D%u%7OK-{6foo+ab+=AghA0oHbb7o;X~h;QNuOHK3VB(=pgj#i>A znboa>R*$irGlZ1mlEP@aMPFC{>v8CZApL=0%ThV7S~+Z4>^-?3IoS`_-kc_m@x$w2 zbUEk;pVU#kPLZ2kiOl;u%#-1w?Y4uuhK zdv(i#plXT-^Vv%}t7q%ikw}hRg3Xb9Lf>WoE35z8fx}ZTsYMDRcoMIF)S`yo?hx>} z#Q|~R*Q|YICUjo-XF&KF@AF6jSa|dTu}WnBiS%%PL)`>ohilM=xwxiJ{j*>5a-S!w zsZfKBB7|`8)=(81RUOS=dDFAzkc5M-s@u8e%PrA)OT3R1l!WLPvAXFL5*^ zj6UXT)YzJ?B7pw3ISho{Oqm#Fwe>Z_1 z4Ix6Y)D{E@>(3VGpqi^#)CybbG2}wAqBE`@lz#I1kSg*BSY|lO0fR&*{bdAzk!wxm z=Y!#ow5}h#sv3mk*y~K)%{JBTNvss%TDp8c>hGry6M1f-b&P;6Q~aX0-9M5g3swtW z4t9(-kJsD9W?K|ro0*EfwIX=*7FgWjj-0E!b!?j;%e=gkOB zfJ&}z5ZNBoO+Oy#`GdG~|2s!9Se}}+y0fc6Jk09>!%&BQ84~USsD2|k0|3n}DPn{* zI6fT|i`WK0jOyJ725t0>UV9 zCHUrSgYtKB-Awi{Ga;iE*H9hqbPK3S4lkG-&yR8bBzB9c(Evj|LQe)B1NwoRG{SJhQno8V#hrZyGT=p?y-8@UmkDqL-GP2}6bzWgpxxga5qui3Qu< zQ+o^4a@Xz0Dy~6!Ib<|*)eR_yefA)~5@54>Qrl-n|?0~mQE4-4(<>EWKz)IVL?M4g`Hc=+T zP1yqdX%@shY!`nGL~SXJjz@T70N!230?`p|4;C-UQZXOjeg5QS04~u0FadoG6DEPi zyZYAav=txvWMsnx4*26QI<4M5hxXZtM_}aJu!;d%r(N+c2`{s2K_4nhR|chtj`+FyyNo$MNxHxCRswyq zV{6SzDx9g;JpE2k2nDIkJ{II5{Uotw%4sIb+f1q%tyqTk?EBkB-wU@kyi<8_9I&$? z&zCMmX$E3wG=s@eHqbokR}kglD_9ps)QlzGF%2QDUXF1MsEC@OfGS4#Qb4J9$KeXPT3yI;x_4UhR+K zY={ody#6RYOQtjt{<7aD@Uk0+>9r>K;v=KT!i)nTw2fe-;T`2OKd7VBg4zEp)m)BEDqa-^!gTBu3AAet770gla8Tl#wWy@;B~Y?C4yv*4Eoo1THI4gsHOToN1U zK2KBzDUqif5bGAANcLRGsJM5ziUo1{g)oj!qzgr^1#m)rPpMn5-foVs5*-tobd~_a zH}mb+Jj5bAT``^(G5f4aw1zA;gK=|86UYwO(m~hGpIT{}2aO}PYt^d&7fALVr$IP!JDTuA~hvTq^1NeZ7wFZ>g!C@At6e{HKqQ{6x#0bQ2 z2nKxrxn&?BAb7&rnDD#(zL|rbaoagtS$m_D|3uD) z58UoP0s`T0WG(pE|4nwX!Mp|gSRf)e(U62!tF8#@hCbE?gmU8j3G59&*$ckkR(}3x z6C#I$pSio-*5k&7A4dGZiGsjL0V*Aub+|VG(pos@7RmS%4&D?D;r0UT-*9_yoQ0>U zv$>Ns_id$bvRfo8{6j(*PtW_)mkZ43>&dvToHKCulljihlT85{ZGIpYy^ZGxF~;@qWd}`g0+*mqo?^(Yi?d%UZq=PV*w_D zxg}QLqv+s|4>=dy63D*!{t2n&gMbiJ{cmL2!LLX`VE8T4YXuph^}R#sc%-Uvi)nhthbYwbn$qN@q!Xw%KE=;Tmg=I_+QAUi@zfI z0FYax`8`qu|EMfb5$xM6%3cn=m)PsIKOqIj5fEtaApUEVm%kzT1txBhBz}YltMX+b z=3w{g&--c7BXo_Se?l(9y;}l&_P@re;+W?%jT^$N(8jX5&Nz9*+r|_Nv_uv=&KJiI5)L$kp1lYMn#**SAoWVB7UqT7e zB&(!~yGnzm|AfqhBUj=3??_zEUy;K6r?<$od-w>xUpBH9BX=M?qP#UJed1AnLWUY6 zAY8(|@9)SjT)!fLyf;6Pc(@2n1%7dqti?^9C?f&91;XF{g#2iPfM6Z^Z$DHM14o*= zxq5qY+W~;^z6uD4#Qm5E=kZG;YK0?JtNgZDQ)@are**s6;}LE`e{BO>zX1RDj*ou= z{@U{4KR9>;h%?mxR|AMY5r6Gm@DDNl9QFV1WAG>3uldJ6I65t~|4%leu7rwqdl4o4 O&k`QqW8jw|Ap8$J?>Jci literal 0 HcmV?d00001 From 629c376f02440fdbc704645c9568327044b939dc Mon Sep 17 00:00:00 2001 From: Tom Callahan Date: Sun, 24 Jun 2018 15:39:56 -0400 Subject: [PATCH 89/92] Close xcontent parsers (partial) (#31513) Partial pass at closing XContentParsers in server. 
This mostly involved adding try-with-resources statements around the usage of XContentParsers. --- .../template/put/PutIndexTemplateRequest.java | 7 +- .../ClusterUpdateSettingsRequestTests.java | 11 +- .../create/CreateIndexRequestTests.java | 9 +- .../mapping/put/PutMappingRequestTests.java | 7 +- .../indices/shrink/ResizeRequestTests.java | 5 +- .../action/get/MultiGetRequestTests.java | 30 +- .../action/get/MultiGetResponseTests.java | 10 +- .../search/MultiSearchResponseTests.java | 8 +- .../cluster/metadata/MetaDataTests.java | 13 +- .../common/geo/BaseGeoParsingTestCase.java | 14 +- .../common/geo/GeoJsonShapeParserTests.java | 260 ++++++++++------- .../common/geo/GeoUtilTests.java | 17 +- .../AbstractShapeBuilderTestCase.java | 13 +- .../common/unit/FuzzinessTests.java | 94 +++--- .../common/xcontent/BaseXContentTestCase.java | 211 ++++++------- .../builder/XContentBuilderTests.java | 61 ++-- .../cbor/CborXContentParserTests.java | 7 +- .../common/xcontent/cbor/JsonVsCborTests.java | 6 +- .../xcontent/smile/JsonVsSmileTests.java | 6 +- .../AbstractXContentFilteringTestCase.java | 6 +- .../org/elasticsearch/index/IndexTests.java | 7 +- .../index/query/BoolQueryBuilderTests.java | 7 +- .../index/query/InnerHitBuilderTests.java | 11 +- .../search/geo/GeoPointParsingTests.java | 93 +++--- .../index/search/geo/GeoUtilsTests.java | 191 ++++++------ .../ingest/IngestMetadataTests.java | 19 +- .../PersistentTasksCustomMetaDataTests.java | 6 +- .../repositories/RepositoryDataTests.java | 35 ++- .../admin/indices/RestAnalyzeActionTests.java | 141 ++++----- .../org/elasticsearch/script/ScriptTests.java | 8 +- .../search/NestedIdentityTests.java | 9 +- .../AggregationCollectorTests.java | 15 +- .../BasePipelineAggregationTestCase.java | 15 +- .../aggregations/bucket/FiltersTests.java | 59 ++-- .../SignificanceHeuristicTests.java | 12 +- .../support/IncludeExcludeTests.java | 70 ++--- .../builder/SearchSourceBuilderTests.java | 4 +- .../highlight/HighlightBuilderTests.java | 76 ++--- .../highlight/HighlightFieldTests.java | 19 +- .../rescore/QueryRescorerBuilderTests.java | 71 +++-- .../searchafter/SearchAfterBuilderTests.java | 28 +- .../search/slice/SliceBuilderTests.java | 11 +- .../search/sort/AbstractSortTestCase.java | 31 +- .../search/sort/FieldSortBuilderTests.java | 17 +- .../sort/GeoDistanceSortBuilderTests.java | 25 +- .../search/sort/NestedSortBuilderTests.java | 13 +- .../search/sort/ScriptSortBuilderTests.java | 97 +++--- .../search/sort/SortBuilderTests.java | 13 +- .../AbstractSuggestionBuilderTestCase.java | 15 +- .../search/suggest/SuggestBuilderTests.java | 11 +- .../CategoryContextMappingTests.java | 276 +++++++++--------- .../phrase/DirectCandidateGeneratorTests.java | 20 +- .../phrase/SmoothingModelTestCase.java | 13 +- 53 files changed, 1197 insertions(+), 1036 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 5d4e558dbb25b..5afba8f66aed3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -558,9 +558,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject("mappings"); for (Map.Entry entry : mappings.entrySet()) { builder.field(entry.getKey()); - XContentParser 
parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, entry.getValue()); - builder.copyCurrentStructure(parser); + try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, entry.getValue())) { + builder.copyCurrentStructure(parser); + } } builder.endObject(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java index c358d0fb6ca52..9701e76619824 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java @@ -58,12 +58,13 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws assertThat(iae.getMessage(), containsString("[cluster_update_settings_request] unknown field [" + unsupportedField + "], parser not found")); } else { - XContentParser parser = createParser(xContentType.xContent(), originalBytes); - ClusterUpdateSettingsRequest parsedRequest = ClusterUpdateSettingsRequest.fromXContent(parser); + try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + ClusterUpdateSettingsRequest parsedRequest = ClusterUpdateSettingsRequest.fromXContent(parser); - assertNull(parser.nextToken()); - assertThat(parsedRequest.transientSettings(), equalTo(request.transientSettings())); - assertThat(parsedRequest.persistentSettings(), equalTo(request.persistentSettings())); + assertNull(parser.nextToken()); + assertThat(parsedRequest.transientSettings(), equalTo(request.transientSettings())); + assertThat(parsedRequest.persistentSettings(), equalTo(request.persistentSettings())); + } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java index e50805ab5b263..1c27934927413 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -134,11 +134,12 @@ public static void assertMappingsEqual(Map expected, Map expectedEntry : expected.entrySet()) { String expectedValue = expectedEntry.getValue(); String actualValue = actual.get(expectedEntry.getKey()); - XContentParser expectedJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + try (XContentParser expectedJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, expectedValue); - XContentParser actualJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, actualValue); - assertEquals(expectedJson.map(), actualJson.map()); + XContentParser actualJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, actualValue)){ + assertEquals(expectedJson.map(), actualJson.map()); + } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index 
e816b08187f1b..be44d790b4004 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -147,9 +147,10 @@ public void testToAndFromXContent() throws IOException { private void assertMappingsEqual(String expected, String actual) throws IOException { - XContentParser expectedJson = createParser(XContentType.JSON.xContent(), expected); - XContentParser actualJson = createParser(XContentType.JSON.xContent(), actual); - assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered()); + try (XContentParser expectedJson = createParser(XContentType.JSON.xContent(), expected); + XContentParser actualJson = createParser(XContentType.JSON.xContent(), actual)) { + assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered()); + } } /** diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java index 4fa99374f0fab..ffbab5805c0a6 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.test.ESTestCase; @@ -93,7 +94,9 @@ public void testToAndFromXContent() throws IOException { ResizeRequest parsedResizeRequest = new ResizeRequest(resizeRequest.getTargetIndexRequest().index(), resizeRequest.getSourceIndex()); - parsedResizeRequest.fromXContent(createParser(xContentType.xContent(), originalBytes)); + try (XContentParser xParser = createParser(xContentType.xContent(), originalBytes)) { + parsedResizeRequest.fromXContent(xParser); + } assertEquals(resizeRequest.getSourceIndex(), parsedResizeRequest.getSourceIndex()); assertEquals(resizeRequest.getTargetIndexRequest().index(), parsedResizeRequest.getTargetIndexRequest().index()); diff --git a/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java b/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java index f1de226704e53..fcb4539c9afe7 100644 --- a/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java @@ -54,9 +54,9 @@ public void testAddWithInvalidKey() throws IOException { builder.endArray(); } builder.endObject(); - final XContentParser parser = createParser(builder); - final MultiGetRequest mgr = new MultiGetRequest(); - final ParsingException e = expectThrows( + try (XContentParser parser = createParser(builder)) { + final MultiGetRequest mgr = new MultiGetRequest(); + final ParsingException e = expectThrows( ParsingException.class, () -> { final String defaultIndex = randomAlphaOfLength(5); @@ -64,9 +64,10 @@ public void testAddWithInvalidKey() throws IOException { final FetchSourceContext fetchSource = FetchSourceContext.FETCH_SOURCE; mgr.add(defaultIndex, defaultType, null, fetchSource, null, parser, true); }); - assertThat( + assertThat( e.toString(), containsString("unknown key [doc] for a START_ARRAY, expected 
[docs] or [ids]")); + } } public void testUnexpectedField() throws IOException { @@ -141,16 +142,17 @@ public void testXContentSerialization() throws IOException { MultiGetRequest expected = createTestInstance(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); - MultiGetRequest actual = new MultiGetRequest(); - actual.add(null, null, null, null, null, parser, true); - assertThat(parser.nextToken(), nullValue()); - - assertThat(actual.items.size(), equalTo(expected.items.size())); - for (int i = 0; i < expected.items.size(); i++) { - MultiGetRequest.Item expectedItem = expected.items.get(i); - MultiGetRequest.Item actualItem = actual.items.get(i); - assertThat(actualItem, equalTo(expectedItem)); + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + MultiGetRequest actual = new MultiGetRequest(); + actual.add(null, null, null, null, null, parser, true); + assertThat(parser.nextToken(), nullValue()); + + assertThat(actual.items.size(), equalTo(expected.items.size())); + for (int i = 0; i < expected.items.size(); i++) { + MultiGetRequest.Item expectedItem = expected.items.get(i); + MultiGetRequest.Item actualItem = actual.items.get(i); + assertThat(actualItem, equalTo(expectedItem)); + } } } } diff --git a/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java b/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java index 1eae583316e15..6331d5ef31dff 100644 --- a/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java @@ -39,10 +39,11 @@ public void testFromXContent() throws IOException { MultiGetResponse expected = createTestInstance(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - - XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); - MultiGetResponse parsed = MultiGetResponse.fromXContent(parser); - assertNull(parser.nextToken()); + MultiGetResponse parsed; + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + parsed = MultiGetResponse.fromXContent(parser); + assertNull(parser.nextToken()); + } assertNotSame(expected, parsed); assertThat(parsed.getResponses().length, equalTo(expected.getResponses().length)); @@ -60,6 +61,7 @@ public void testFromXContent() throws IOException { assertThat(actualItem.getResponse(), equalTo(expectedItem.getResponse())); } } + } } diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java index 874bea5ff657e..4f1fa4cf06116 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java @@ -40,9 +40,11 @@ public void testFromXContent() throws IOException { MultiSearchResponse expected = createTestInstance(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - XContentParser parser = 
createParser(XContentFactory.xContent(xContentType), shuffled); - MultiSearchResponse actual = MultiSearchResponse.fromXContext(parser); - assertThat(parser.nextToken(), nullValue()); + MultiSearchResponse actual; + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + actual = MultiSearchResponse.fromXContext(parser); + assertThat(parser.nextToken(), nullValue()); + } assertThat(actual.getTook(), equalTo(expected.getTook())); assertThat(actual.getResponses().length, equalTo(expected.getResponses().length)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 96a533118c8da..32dd4324ff835 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -181,8 +181,7 @@ public void testUnknownFieldClusterMetaData() throws IOException { .field("random", "value") .endObject() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, metadata); - try { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, metadata)) { MetaData.Builder.fromXContent(parser); fail(); } catch (IllegalArgumentException e) { @@ -197,8 +196,7 @@ public void testUnknownFieldIndexMetaData() throws IOException { .field("random", "value") .endObject() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, metadata); - try { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, metadata)) { IndexMetaData.Builder.fromXContent(parser); fail(); } catch (IllegalArgumentException e) { @@ -225,9 +223,10 @@ public void testXContentWithIndexGraveyard() throws IOException { builder.startObject(); originalMeta.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - final MetaData fromXContentMeta = MetaData.fromXContent(parser); - assertThat(fromXContentMeta.indexGraveyard(), equalTo(originalMeta.indexGraveyard())); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + final MetaData fromXContentMeta = MetaData.fromXContent(parser); + assertThat(fromXContentMeta.indexGraveyard(), equalTo(originalMeta.indexGraveyard())); + } } public void testSerializationWithIndexGraveyard() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java b/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java index f7771f0f84466..023932be6a9d0 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java @@ -50,15 +50,17 @@ abstract class BaseGeoParsingTestCase extends ESTestCase { public abstract void testParseGeometryCollection() throws IOException; protected void assertValidException(XContentBuilder builder, Class expectedException) throws IOException { - XContentParser parser = createParser(builder); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, expectedException); + try (XContentParser parser = createParser(builder)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, expectedException); + } } protected void assertGeometryEquals(Shape expected, XContentBuilder geoJson) throws IOException { - 
XContentParser parser = createParser(geoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertEquals(expected, ShapeParser.parse(parser).build()); + try (XContentParser parser = createParser(geoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertEquals(expected, ShapeParser.parse(parser).build()); + } } protected ShapeCollection shapeCollection(Shape... shapes) { diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java index 6f9128454f374..bb462ac60342f 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java @@ -193,18 +193,20 @@ public void testParseEnvelope() throws IOException { .startArray().value(50).value(-39).endArray() .endArray() .endObject(); - XContentParser parser = createParser(multilinesGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(multilinesGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } // test #4: "envelope" with empty coordinates multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") .startArray("coordinates") .endArray() .endObject(); - parser = createParser(multilinesGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(multilinesGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } } @Override @@ -266,9 +268,10 @@ public void testParse3DPolygon() throws IOException { Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath()); final GeoShapeFieldMapper mapperBuilder = new GeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext); - XContentParser parser = createParser(polygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertEquals(jtsGeom(expected), ShapeParser.parse(parser, mapperBuilder).build()); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertEquals(jtsGeom(expected), ShapeParser.parse(parser, mapperBuilder).build()); + } } public void testInvalidDimensionalPolygon() throws IOException { @@ -285,9 +288,10 @@ public void testInvalidDimensionalPolygon() throws IOException { .endArray() .endArray() .endObject(); - XContentParser parser = createParser(polygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } } public void testParseInvalidPoint() throws IOException { @@ -299,9 +303,10 @@ public void testParseInvalidPoint() throws IOException { .startArray().value(-74.011).value(40.753).endArray() .endArray() .endObject(); - XContentParser parser = createParser(invalidPoint1); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser 
= createParser(invalidPoint1)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } // test case 2: create an invalid point object with an empty number of coordinates XContentBuilder invalidPoint2 = XContentFactory.jsonBuilder() @@ -310,9 +315,10 @@ public void testParseInvalidPoint() throws IOException { .startArray("coordinates") .endArray() .endObject(); - parser = createParser(invalidPoint2); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidPoint2)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } } public void testParseInvalidMultipoint() throws IOException { @@ -322,9 +328,10 @@ public void testParseInvalidMultipoint() throws IOException { .field("type", "multipoint") .startArray("coordinates").value(-74.011).value(40.753).endArray() .endObject(); - XContentParser parser = createParser(invalidMultipoint1); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidMultipoint1)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } // test case 2: create an invalid multipoint object with null coordinate XContentBuilder invalidMultipoint2 = XContentFactory.jsonBuilder() @@ -333,9 +340,10 @@ public void testParseInvalidMultipoint() throws IOException { .startArray("coordinates") .endArray() .endObject(); - parser = createParser(invalidMultipoint2); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidMultipoint2)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } // test case 3: create a valid formatted multipoint object with invalid number (0) of coordinates XContentBuilder invalidMultipoint3 = XContentFactory.jsonBuilder() @@ -345,9 +353,10 @@ public void testParseInvalidMultipoint() throws IOException { .startArray().endArray() .endArray() .endObject(); - parser = createParser(invalidMultipoint3); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidMultipoint3)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } } public void testParseInvalidMultiPolygon() throws IOException { @@ -380,9 +389,10 @@ public void testParseInvalidMultiPolygon() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + } } public void testParseInvalidDimensionalMultiPolygon() throws IOException { @@ -419,9 +429,10 @@ public void testParseInvalidDimensionalMultiPolygon() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson); - parser.nextToken(); - 
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } } @@ -440,11 +451,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - Shape shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 2: ccw poly crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -460,11 +472,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 3: cw poly not crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -480,11 +493,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 4: cw poly crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -500,11 +514,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } } public void testParseOGCPolygonWithHoles() throws IOException { @@ -528,11 +543,12 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - Shape shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 2: ccw poly crossing dateline polygonGeoJson = 
Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -554,11 +570,12 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 3: cw poly not crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -580,11 +597,13 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); + + ElasticsearchGeoAssertions.assertPolygon(shape); + } - ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: cw poly crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -606,11 +625,12 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } } public void testParseInvalidPolygon() throws IOException { @@ -627,9 +647,10 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } // test case 2: create an invalid polygon with only 1 point invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -640,9 +661,10 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } // test case 3: create an invalid polygon with 0 points invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -653,9 +675,10 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - 
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } // test case 4: create an invalid polygon with null value points invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -666,9 +689,10 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + } // test case 5: create an invalid polygon with 1 invalid LinearRing invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -677,18 +701,20 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + } // test case 6: create an invalid polygon with 0 LinearRings invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates").endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } // test case 7: create an invalid polygon with 0 LinearRings invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -697,9 +723,10 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + } } public void testParsePolygonWithHole() throws IOException { @@ -764,9 +791,10 @@ public void testParseSelfCrossingPolygon() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + } } @Override @@ -980,11 +1008,12 @@ public void testParseOrientationOption() throws IOException { .endArray() 
.endObject(); - XContentParser parser = createParser(polygonGeoJson); - parser.nextToken(); - Shape shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 2: valid ccw (right handed system) poly not crossing dateline (with 'ccw' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1009,11 +1038,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 3: valid ccw (right handed system) poly not crossing dateline (with 'counterclockwise' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1038,11 +1068,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 4: valid cw (left handed system) poly crossing dateline (with 'left' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1067,11 +1098,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 5: valid cw multipoly (left handed system) poly crossing dateline (with 'cw' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1096,11 +1128,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 6: valid cw multipoly (left handed system) poly crossing dateline (with 'clockwise' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1125,10 +1158,11 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } } } diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java 
b/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java index efec56e788da1..f23e89ecb2bf7 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java @@ -59,13 +59,14 @@ private int parsePrecision(CheckedConsumer tokenGe XContentBuilder builder = jsonBuilder().startObject(); tokenGenerator.accept(builder); builder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); // { - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); // field name - assertTrue(parser.nextToken().isValue()); // field value - int precision = GeoUtils.parsePrecision(parser); - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // } - assertNull(parser.nextToken()); // no more tokens - return precision; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); // { + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); // field name + assertTrue(parser.nextToken().isValue()); // field value + int precision = GeoUtils.parsePrecision(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // } + assertNull(parser.nextToken()); // no more tokens + return precision; + } } } diff --git a/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 5ac55832959d7..5f2c721533de9 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -79,12 +79,13 @@ public void testFromXContent() throws IOException { } XContentBuilder builder = testShape.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser shapeContentParser = createParser(shuffled); - shapeContentParser.nextToken(); - ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser); - assertNotSame(testShape, parsedShape); - assertEquals(testShape, parsedShape); - assertEquals(testShape.hashCode(), parsedShape.hashCode()); + try (XContentParser shapeContentParser = createParser(shuffled)) { + shapeContentParser.nextToken(); + ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser); + assertNotSame(testShape, parsedShape); + assertEquals(testShape, parsedShape); + assertEquals(testShape.hashCode(), parsedShape.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java b/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java index 79b6aa5f60436..0074da43fcfb8 100644 --- a/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java +++ b/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java @@ -45,13 +45,14 @@ public void testParseFromXContent() throws IOException { XContentBuilder json = jsonBuilder().startObject() .field(Fuzziness.X_FIELD_NAME, floatValue) .endObject(); - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), 
equalTo(XContentParser.Token.VALUE_NUMBER)); - Fuzziness fuzziness = Fuzziness.parse(parser); - assertThat(fuzziness.asFloat(), equalTo(floatValue)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_NUMBER)); + Fuzziness fuzziness = Fuzziness.parse(parser); + assertThat(fuzziness.asFloat(), equalTo(floatValue)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + } } { Integer intValue = frequently() ? randomIntBetween(0, 2) : randomIntBetween(0, 100); @@ -63,28 +64,29 @@ public void testParseFromXContent() throws IOException { XContentBuilder json = jsonBuilder().startObject() .field(Fuzziness.X_FIELD_NAME, randomBoolean() ? value.toString() : value) .endObject(); - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING))); - Fuzziness fuzziness = Fuzziness.parse(parser); - if (value.intValue() >= 1) { - assertThat(fuzziness.asDistance(), equalTo(Math.min(2, value.intValue()))); - } - assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); - if (intValue.equals(value)) { - switch (intValue) { - case 1: - assertThat(fuzziness, sameInstance(Fuzziness.ONE)); - break; - case 2: - assertThat(fuzziness, sameInstance(Fuzziness.TWO)); - break; - case 0: - assertThat(fuzziness, sameInstance(Fuzziness.ZERO)); - break; - default: - break; + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING))); + Fuzziness fuzziness = Fuzziness.parse(parser); + if (value.intValue() >= 1) { + assertThat(fuzziness.asDistance(), equalTo(Math.min(2, value.intValue()))); + } + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + if (intValue.equals(value)) { + switch (intValue) { + case 1: + assertThat(fuzziness, sameInstance(Fuzziness.ONE)); + break; + case 2: + assertThat(fuzziness, sameInstance(Fuzziness.TWO)); + break; + case 0: + assertThat(fuzziness, sameInstance(Fuzziness.ZERO)); + break; + default: + break; + } } } } @@ -102,15 +104,16 @@ public void testParseFromXContent() throws IOException { .field(Fuzziness.X_FIELD_NAME, auto) .endObject(); } - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); - Fuzziness fuzziness = Fuzziness.parse(parser); - if (isDefaultAutoFuzzinessTested) { - assertThat(fuzziness, sameInstance(Fuzziness.AUTO)); + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), 
equalTo(XContentParser.Token.VALUE_STRING)); + Fuzziness fuzziness = Fuzziness.parse(parser); + if (isDefaultAutoFuzzinessTested) { + assertThat(fuzziness, sameInstance(Fuzziness.AUTO)); + } + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); } - assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); } } @@ -152,15 +155,16 @@ public void testSerializationCustomAuto() throws IOException { .field(Fuzziness.X_FIELD_NAME, auto) .endObject(); - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); - Fuzziness fuzziness = Fuzziness.parse(parser); + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); + Fuzziness fuzziness = Fuzziness.parse(parser); - Fuzziness deserializedFuzziness = doSerializeRoundtrip(fuzziness); - assertEquals(fuzziness, deserializedFuzziness); - assertEquals(fuzziness.asString(), deserializedFuzziness.asString()); + Fuzziness deserializedFuzziness = doSerializeRoundtrip(fuzziness); + assertEquals(fuzziness, deserializedFuzziness); + assertEquals(fuzziness.asString(), deserializedFuzziness.asString()); + } } private static Fuzziness doSerializeRoundtrip(Fuzziness in) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 86e55c1ab6a91..690e7567e59ff 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -274,14 +274,15 @@ public void testBinaryField() throws Exception { final byte[] randomBytes = randomBytes(); BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary", randomBytes).endObject()); - XContentParser parser = createParser(xcontentType().xContent(), bytes); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "binary"); - assertTrue(parser.nextToken().isValue()); - assertArrayEquals(randomBytes, parser.binaryValue()); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), bytes)) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "binary"); + assertTrue(parser.nextToken().isValue()); + assertArrayEquals(randomBytes, parser.binaryValue()); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testBinaryValue() throws Exception { @@ -290,14 +291,15 @@ public void testBinaryValue() throws Exception { final byte[] randomBytes = randomBytes(); BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary").value(randomBytes).endObject()); - XContentParser parser = createParser(xcontentType().xContent(), bytes); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - 
assertEquals(parser.currentName(), "binary"); - assertTrue(parser.nextToken().isValue()); - assertArrayEquals(randomBytes, parser.binaryValue()); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), bytes)) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "binary"); + assertTrue(parser.nextToken().isValue()); + assertArrayEquals(randomBytes, parser.binaryValue()); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testBinaryValueWithOffsetLength() throws Exception { @@ -315,14 +317,15 @@ public void testBinaryValueWithOffsetLength() throws Exception { } builder.endObject(); - XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "bin"); - assertTrue(parser.nextToken().isValue()); - assertArrayEquals(Arrays.copyOfRange(randomBytes, offset, offset + length), parser.binaryValue()); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder))) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "bin"); + assertTrue(parser.nextToken().isValue()); + assertArrayEquals(Arrays.copyOfRange(randomBytes, offset, offset + length), parser.binaryValue()); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testBinaryUTF8() throws Exception { @@ -333,14 +336,15 @@ public void testBinaryUTF8() throws Exception { builder.field("utf8").utf8Value(randomBytesRef.bytes, randomBytesRef.offset, randomBytesRef.length); builder.endObject(); - XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "utf8"); - assertTrue(parser.nextToken().isValue()); - assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(randomBytesRef.utf8ToString())); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder))) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "utf8"); + assertTrue(parser.nextToken().isValue()); + assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(randomBytesRef.utf8ToString())); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testText() throws Exception { @@ -351,14 +355,15 @@ public void testText() throws Exception { final BytesReference random = new BytesArray(randomBytes()); XContentBuilder builder = builder().startObject().field("text", new Text(random)).endObject(); - XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "text"); - 
assertTrue(parser.nextToken().isValue()); - assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(random.utf8ToString())); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder))) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "text"); + assertTrue(parser.nextToken().isValue()); + assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(random.utf8ToString())); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testReadableInstant() throws Exception { @@ -741,18 +746,19 @@ void doTestRawField(XContent source, boolean useStream) throws Exception { generator.writeEndObject(); } - XContentParser parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("bar", parser.currentName()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("foo", parser.currentName()); - assertEquals(Token.VALUE_NULL, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray())) { + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("bar", parser.currentName()); + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("foo", parser.currentName()); + assertEquals(Token.VALUE_NULL, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } } public void testRawValue() throws Exception { @@ -776,14 +782,15 @@ void doTestRawValue(XContent source) throws Exception { generator.writeRawValue(new BytesArray(rawData).streamInput(), source.type()); } - XContentParser parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("foo", parser.currentName()); - assertEquals(Token.VALUE_NULL, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray())) { + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("foo", parser.currentName()); + assertEquals(Token.VALUE_NULL, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } os = new ByteArrayOutputStream(); try (XContentGenerator generator = xcontentType().xContent().createGenerator(os)) { @@ -793,18 +800,19 @@ void doTestRawValue(XContent 
source) throws Exception { generator.writeEndObject(); } - parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("test", parser.currentName()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("foo", parser.currentName()); - assertEquals(Token.VALUE_NULL, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray())) { + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("test", parser.currentName()); + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("foo", parser.currentName()); + assertEquals(Token.VALUE_NULL, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } } @@ -822,11 +830,12 @@ protected void doTestBigInteger(JsonGenerator generator, ByteArrayOutputStream o generator.flush(); byte[] serialized = os.toByteArray(); - XContentParser parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, serialized); - Map map = parser.map(); - assertEquals("bar", map.get("foo")); - assertEquals(bigInteger, map.get("bigint")); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, serialized)) { + Map map = parser.map(); + assertEquals("bar", map.get("foo")); + assertEquals(bigInteger, map.get("bigint")); + } } public void testEnsureNameNotNull() { @@ -984,44 +993,46 @@ public void testChecksForDuplicates() throws Exception { .field("key", 1) .field("key", 2) .endObject(); - - JsonParseException pex = expectThrows(JsonParseException.class, () -> createParser(builder).map()); - assertThat(pex.getMessage(), startsWith("Duplicate field 'key'")); + try (XContentParser xParser = createParser(builder)) { + JsonParseException pex = expectThrows(JsonParseException.class, () -> xParser.map()); + assertThat(pex.getMessage(), startsWith("Duplicate field 'key'")); + } } public void testNamedObject() throws IOException { Object test1 = new Object(); Object test2 = new Object(); NamedXContentRegistry registry = new NamedXContentRegistry(Arrays.asList( - new NamedXContentRegistry.Entry(Object.class, new ParseField("test1"), p -> test1), - new NamedXContentRegistry.Entry(Object.class, new ParseField("test2", "deprecated"), p -> test2), - new NamedXContentRegistry.Entry(Object.class, new ParseField("str"), p -> p.text()))); + new NamedXContentRegistry.Entry(Object.class, new ParseField("test1"), p -> test1), + new NamedXContentRegistry.Entry(Object.class, new ParseField("test2", "deprecated"), p -> test2), + new NamedXContentRegistry.Entry(Object.class, new ParseField("str"), p -> p.text()))); XContentBuilder b = XContentBuilder.builder(xcontentType().xContent()); b.value("test"); - XContentParser p = 
xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, - BytesReference.bytes(b).streamInput()); - assertEquals(test1, p.namedObject(Object.class, "test1", null)); - assertEquals(test2, p.namedObject(Object.class, "test2", null)); - assertEquals(test2, p.namedObject(Object.class, "deprecated", null)); - assertWarnings("Deprecated field [deprecated] used, expected [test2] instead"); - { + try (XContentParser p = xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, + BytesReference.bytes(b).streamInput())) { + assertEquals(test1, p.namedObject(Object.class, "test1", null)); + assertEquals(test2, p.namedObject(Object.class, "test2", null)); + assertEquals(test2, p.namedObject(Object.class, "deprecated", null)); + assertWarnings("Deprecated field [deprecated] used, expected [test2] instead"); p.nextToken(); assertEquals("test", p.namedObject(Object.class, "str", null)); - NamedObjectNotFoundException e = expectThrows(NamedObjectNotFoundException.class, + { + NamedObjectNotFoundException e = expectThrows(NamedObjectNotFoundException.class, () -> p.namedObject(Object.class, "unknown", null)); - assertThat(e.getMessage(), endsWith("unable to parse Object with name [unknown]: parser not found")); - } - { - Exception e = expectThrows(NamedObjectNotFoundException.class, () -> p.namedObject(String.class, "doesn't matter", null)); - assertEquals("unknown named object category [java.lang.String]", e.getMessage()); + assertThat(e.getMessage(), endsWith("unable to parse Object with name [unknown]: parser not found")); + } + { + Exception e = expectThrows(NamedObjectNotFoundException.class, () -> p.namedObject(String.class, "doesn't matter", null)); + assertEquals("unknown named object category [java.lang.String]", e.getMessage()); + } } - { - XContentParser emptyRegistryParser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new byte[] {}); + try (XContentParser emptyRegistryParser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new byte[] {})) { Exception e = expectThrows(NamedObjectNotFoundException.class, - () -> emptyRegistryParser.namedObject(String.class, "doesn't matter", null)); + () -> emptyRegistryParser.namedObject(String.class, "doesn't matter", null)); assertEquals("named objects are not supported for this parser", e.getMessage()); } + } private static void expectUnclosedException(ThrowingRunnable runnable) { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index cb666418b6cac..07338d9286b70 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -216,43 +216,44 @@ public void testCopyCurrentStructure() throws Exception { } builder.field("fakefield", terms).endObject().endObject().endObject(); - - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - XContentBuilder filterBuilder = null; XContentParser.Token token; - String currentFieldName = null; - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - 
currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if ("test".equals(currentFieldName)) { - assertThat(parser.text(), equalTo("test field")); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if ("filter".equals(currentFieldName)) { - filterBuilder = XContentFactory.contentBuilder(parser.contentType()); - filterBuilder.copyCurrentStructure(parser); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + + String currentFieldName = null; + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if ("test".equals(currentFieldName)) { + assertThat(parser.text(), equalTo("test field")); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if ("filter".equals(currentFieldName)) { + filterBuilder = XContentFactory.contentBuilder(parser.contentType()); + filterBuilder.copyCurrentStructure(parser); + } } } } - assertNotNull(filterBuilder); - parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(filterBuilder)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.currentName(), equalTo("terms")); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.currentName(), equalTo("fakefield")); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_ARRAY)); - int i = 0; - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - assertThat(parser.text(), equalTo(terms.get(i++))); - } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(filterBuilder))) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("terms")); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("fakefield")); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_ARRAY)); + int i = 0; + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + assertThat(parser.text(), equalTo(terms.get(i++))); + } - assertThat(i, equalTo(terms.size())); + assertThat(i, equalTo(terms.size())); + } } public void testHandlingOfPath() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java index 146b83c8c17a9..0e682e8be66c1 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java @@ -33,9 +33,10 @@ public void testEmptyValue() throws IOException { for (int i = 0; i < 2; i++) { // Running this part twice triggers the issue. 
// See https://github.com/elastic/elasticsearch/issues/8629 - XContentParser parser = createParser(CborXContent.cborXContent, ref); - while (parser.nextToken() != null) { - parser.charBuffer(); + try (XContentParser parser = createParser(CborXContent.cborXContent, ref)) { + while (parser.nextToken() != null) { + parser.charBuffer(); + } } } } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java index e165425400eb5..b10cce71f718a 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java @@ -62,8 +62,10 @@ public void testCompareParsingTokens() throws IOException { xsonGen.close(); jsonGen.close(); - - verifySameTokens(createParser(JsonXContent.jsonXContent, jsonOs.bytes()), createParser(CborXContent.cborXContent, xsonOs.bytes())); + try (XContentParser json0sParser = createParser(JsonXContent.jsonXContent, jsonOs.bytes()); + XContentParser xson0sParser = createParser(CborXContent.cborXContent, xsonOs.bytes())) { + verifySameTokens(json0sParser, xson0sParser); + } } private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java index 47913a5481e33..7f909df694f8e 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java @@ -63,8 +63,10 @@ public void testCompareParsingTokens() throws IOException { xsonGen.close(); jsonGen.close(); - verifySameTokens(createParser(JsonXContent.jsonXContent, jsonOs.bytes()), - createParser(SmileXContent.smileXContent, xsonOs.bytes())); + try (XContentParser jsonParser = createParser(JsonXContent.jsonXContent, jsonOs.bytes()); + XContentParser smileParser = createParser(SmileXContent.smileXContent, xsonOs.bytes())) { + verifySameTokens(jsonParser, smileParser); + } } private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java index 1d12defe6988d..4aa19b78a5ca0 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java @@ -75,15 +75,15 @@ static void assertXContentBuilderAsString(final XContentBuilder expected, final } static void assertXContentBuilderAsBytes(final XContentBuilder expected, final XContentBuilder actual) { - try { - XContent xContent = XContentFactory.xContent(actual.contentType()); + XContent xContent = XContentFactory.xContent(actual.contentType()); + try ( XContentParser jsonParser = xContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(expected).streamInput()); XContentParser testParser = xContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(actual).streamInput()); - + ) { while 
(true) { XContentParser.Token token1 = jsonParser.nextToken(); XContentParser.Token token2 = testParser.nextToken(); diff --git a/server/src/test/java/org/elasticsearch/index/IndexTests.java b/server/src/test/java/org/elasticsearch/index/IndexTests.java index f1360071745d0..9b0ca1978075a 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexTests.java @@ -56,9 +56,10 @@ public void testXContent() throws IOException { final Index original = new Index(name, uuid); final XContentBuilder builder = JsonXContent.contentBuilder(); original.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - parser.nextToken(); // the beginning of the parser - assertThat(Index.fromXContent(parser), equalTo(original)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + parser.nextToken(); // the beginning of the parser + assertThat(Index.fromXContent(parser), equalTo(original)); + } } public void testEquals() { diff --git a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index a417cba13b9a4..362adf4a4c996 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; @@ -169,8 +170,10 @@ public void testIllegalArguments() { public void testEmptyBooleanQuery() throws Exception { XContentBuilder contentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); contentBuilder.startObject().startObject("bool").endObject().endObject(); - Query parsedQuery = parseQuery(createParser(contentBuilder)).toQuery(createShardContext()); - assertThat(parsedQuery, Matchers.instanceOf(MatchAllDocsQuery.class)); + try (XContentParser xParser = createParser(contentBuilder)) { + Query parsedQuery = parseQuery(xParser).toQuery(createShardContext()); + assertThat(parsedQuery, Matchers.instanceOf(MatchAllDocsQuery.class)); + } } public void testDefaultMinShouldMatch() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index a2068a666f44c..95a91e1668c3e 100644 --- a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -124,11 +124,12 @@ public void testFromAndToXContent() throws Exception { innerHit.toXContent(builder, ToXContent.EMPTY_PARAMS); //fields is printed out as an object but parsed into a List where order matters, we disable shuffling XContentBuilder shuffled = shuffleXContent(builder, "fields"); - XContentParser parser = createParser(shuffled); - InnerHitBuilder secondInnerHits = InnerHitBuilder.fromXContent(parser); - assertThat(innerHit, not(sameInstance(secondInnerHits))); - assertThat(innerHit, equalTo(secondInnerHits)); - 
assertThat(innerHit.hashCode(), equalTo(secondInnerHits.hashCode())); + try (XContentParser parser = createParser(shuffled)) { + InnerHitBuilder secondInnerHits = InnerHitBuilder.fromXContent(parser); + assertThat(innerHit, not(sameInstance(secondInnerHits))); + assertThat(innerHit, equalTo(secondInnerHits)); + assertThat(innerHit.hashCode(), equalTo(secondInnerHits.hashCode())); + } } } diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java index 4b580aa6a2467..b116c61d27c28 100644 --- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java @@ -107,16 +107,17 @@ public void testInvalidPointEmbeddedObject() throws IOException { content.endObject(); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); - - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } } public void testInvalidPointLatHashMix() throws IOException { @@ -125,16 +126,17 @@ public void testInvalidPointLatHashMix() throws IOException { content.field("lat", 0).field("geohash", stringEncode(0d, 0d)); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); - - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = 
expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } } public void testInvalidPointLonHashMix() throws IOException { @@ -143,17 +145,18 @@ public void testInvalidPointLonHashMix() throws IOException { content.field("lon", 0).field("geohash", stringEncode(0d, 0d)); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } } public void testInvalidField() throws IOException { @@ -162,17 +165,18 @@ public void testInvalidField() throws IOException { content.field("lon", 0).field("lat", 0).field("test", 0); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); - + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } } public void testInvalidGeoHash() throws IOException { @@ -181,11 +185,12 @@ public void testInvalidGeoHash() throws IOException { content.field("geohash", "!!!!"); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); + try (XContentParser 
parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("unsupported symbol [!] in geohash [!!!!]")); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("unsupported symbol [!] in geohash [!!!!]")); + } } private XContentParser objectLatLon(double lat, double lon) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java index d390490dd225c..9fec336e2a33f 100644 --- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java @@ -384,29 +384,33 @@ public void testParseGeoPoint() throws IOException { double lat = randomDouble() * 180 - 90 + randomIntBetween(-1000, 1000) * 180; double lon = randomDouble() * 360 - 180 + randomIntBetween(-1000, 1000) * 360; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", lon).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - GeoPoint point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); + } json = jsonBuilder().startObject().field("lat", String.valueOf(lat)).field("lon", String.valueOf(lon)).endObject(); - parser = createParser(json); - parser.nextToken(); - point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); - json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).endArray().endObject(); - parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { + try (XContentParser parser = createParser(json)) { parser.nextToken(); + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); + } + json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).endArray().endObject(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); } - point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); json = jsonBuilder().startObject().field("foo", lat + "," + lon).endObject(); - parser = createParser(json); - while (parser.currentToken() != Token.VALUE_STRING) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_STRING) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); } - point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); } } @@ -415,12 +419,13 @@ public void testParseGeoPointStringZValueError() throws IOException { double lon = randomDouble() * 360 - 180 + randomIntBetween(-1000, 1000) * 360; double alt = randomDouble() * 1000; XContentBuilder json = jsonBuilder().startObject().field("foo", lat + "," + lon + "," + alt).endObject(); - XContentParser parser = 
createParser(json); - while (parser.currentToken() != Token.VALUE_STRING) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_STRING) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser, new GeoPoint(), false)); + assertThat(e.getMessage(), containsString("but [ignore_z_value] parameter is [false]")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser, new GeoPoint(), false)); - assertThat(e.getMessage(), containsString("but [ignore_z_value] parameter is [false]")); } public void testParseGeoPointGeohash() throws IOException { @@ -431,74 +436,82 @@ public void testParseGeoPointGeohash() throws IOException { geohashBuilder.append(BASE_32[randomInt(BASE_32.length - 1)]); } XContentBuilder json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - GeoPoint point = GeoUtils.parseGeoPoint(parser); - assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); - assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); - json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject(); - parser = createParser(json); - while (parser.currentToken() != Token.VALUE_STRING) { + try (XContentParser parser = createParser(json)) { parser.nextToken(); + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); + assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); + } + json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_STRING) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); + assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); } - point = GeoUtils.parseGeoPoint(parser); - assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); - assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); } } public void testParseGeoPointGeohashWrongType() throws IOException { XContentBuilder json = jsonBuilder().startObject().field("geohash", 1.0).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), containsString("geohash must be a string")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), containsString("geohash must be a string")); + } } public void testParseGeoPointLatNoLon() throws IOException { double lat = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field [lon] missing")); + try (XContentParser parser = createParser(json)) { 
+ parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field [lon] missing")); + } } public void testParseGeoPointLonNoLat() throws IOException { double lon = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lon", lon).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field [lat] missing")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field [lat] missing")); + } } public void testParseGeoPointLonWrongType() throws IOException { double lat = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", false).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("longitude must be a number")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("longitude must be a number")); + } } public void testParseGeoPointLatWrongType() throws IOException { double lon = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", false).field("lon", lon).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("latitude must be a number")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("latitude must be a number")); + } } public void testParseGeoPointExtraField() throws IOException { double lat = 0.0; double lon = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", lon).field("foo", true).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } } public void testParseGeoPointLonLatGeoHash() throws IOException { @@ -506,10 +519,11 @@ public void testParseGeoPointLonLatGeoHash() throws IOException { double lon = 0.0; String geohash = "abcd"; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", lon).field("geohash", geohash).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + 
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash")); + } } public void testParseGeoPointArrayTooManyValues() throws IOException { @@ -517,12 +531,13 @@ public void testParseGeoPointArrayTooManyValues() throws IOException { double lon = 0.0; double elev = 0.0; XContentBuilder json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).value(elev).endArray().endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("Exception parsing coordinates: found Z value [0.0] but [ignore_z_value] parameter is [false]")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("Exception parsing coordinates: found Z value [0.0] but [ignore_z_value] parameter is [false]")); } public void testParseGeoPointArray3D() throws IOException { @@ -530,35 +545,38 @@ public void testParseGeoPointArray3D() throws IOException { double lon = -180.0; double elev = 0.0; XContentBuilder json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).value(elev).endArray().endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser, new GeoPoint(), true); + assertThat(point.lat(), equalTo(lat)); + assertThat(point.lon(), equalTo(lon)); } - GeoPoint point = GeoUtils.parseGeoPoint(parser, new GeoPoint(), true); - assertThat(point.lat(), equalTo(lat)); - assertThat(point.lon(), equalTo(lon)); } public void testParseGeoPointArrayWrongType() throws IOException { double lat = 0.0; boolean lon = false; XContentBuilder json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).endArray().endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("numeric value expected")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("numeric value expected")); } public void testParseGeoPointInvalidType() throws IOException { XContentBuilder json = jsonBuilder().startObject().field("foo", 5).endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.VALUE_NUMBER) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_NUMBER) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("geo_point expected")); } - Exception e = 
expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("geo_point expected")); } public void testPrefixTreeCellSizes() { @@ -619,9 +637,10 @@ public void testParseGeoPointGeohashPositions() throws IOException { } private GeoPoint parseGeohash(String geohash, GeoUtils.EffectivePoint effectivePoint) throws IOException { - XContentParser parser = createParser(jsonBuilder().startObject().field("geohash", geohash).endObject()); - parser.nextToken(); - return GeoUtils.parseGeoPoint(parser, new GeoPoint(), randomBoolean(), effectivePoint); + try (XContentParser parser = createParser(jsonBuilder().startObject().field("geohash", geohash).endObject())) { + parser.nextToken(); + return GeoUtils.parseGeoPoint(parser, new GeoPoint(), randomBoolean(), effectivePoint); + } } private static void assertNormalizedPoint(GeoPoint input, GeoPoint expected) { diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java index 518b775d7f802..6684544a74749 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java @@ -57,15 +57,16 @@ public void testFromXContent() throws IOException { ingestMetadata.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); XContentBuilder shuffled = shuffleXContent(builder); - final XContentParser parser = createParser(shuffled); - MetaData.Custom custom = IngestMetadata.fromXContent(parser); - assertTrue(custom instanceof IngestMetadata); - IngestMetadata m = (IngestMetadata) custom; - assertEquals(2, m.getPipelines().size()); - assertEquals("1", m.getPipelines().get("1").getId()); - assertEquals("2", m.getPipelines().get("2").getId()); - assertEquals(pipeline.getConfigAsMap(), m.getPipelines().get("1").getConfigAsMap()); - assertEquals(pipeline2.getConfigAsMap(), m.getPipelines().get("2").getConfigAsMap()); + try (XContentParser parser = createParser(shuffled)) { + MetaData.Custom custom = IngestMetadata.fromXContent(parser); + assertTrue(custom instanceof IngestMetadata); + IngestMetadata m = (IngestMetadata) custom; + assertEquals(2, m.getPipelines().size()); + assertEquals("1", m.getPipelines().get("1").getId()); + assertEquals("2", m.getPipelines().get("2").getId()); + assertEquals(pipeline.getConfigAsMap(), m.getPipelines().get("1").getConfigAsMap()); + assertEquals(pipeline2.getConfigAsMap(), m.getPipelines().get("2").getConfigAsMap()); + } } public void testDiff() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java index 5b1f74d6cdfa5..2a180cc12dd19 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java @@ -174,8 +174,10 @@ public void testSerializationContext() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(testInstance, xContentType, params, false); - XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); - PersistentTasksCustomMetaData newInstance = doParseInstance(parser); + PersistentTasksCustomMetaData newInstance; + try (XContentParser parser = 
createParser(XContentFactory.xContent(xContentType), shuffled)) { + newInstance = doParseInstance(parser); + } assertNotSame(newInstance, testInstance); assertEquals(testInstance.tasks().size(), newInstance.tasks().size()); diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java index d0cf5d374897d..1d37490e2ff5f 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java @@ -62,11 +62,12 @@ public void testXContent() throws IOException { RepositoryData repositoryData = generateRandomRepoData(); XContentBuilder builder = JsonXContent.contentBuilder(); repositoryData.snapshotsToXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - long gen = (long) randomIntBetween(0, 500); - RepositoryData fromXContent = RepositoryData.snapshotsFromXContent(parser, gen); - assertEquals(repositoryData, fromXContent); - assertEquals(gen, fromXContent.getGenId()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + long gen = (long) randomIntBetween(0, 500); + RepositoryData fromXContent = RepositoryData.snapshotsFromXContent(parser, gen); + assertEquals(repositoryData, fromXContent); + assertEquals(gen, fromXContent.getGenId()); + } } public void testAddSnapshots() { @@ -166,7 +167,10 @@ public void testIndexThatReferencesAnUnknownSnapshot() throws IOException { XContentBuilder builder = XContentBuilder.builder(xContent); repositoryData.snapshotsToXContent(builder, ToXContent.EMPTY_PARAMS); - RepositoryData parsedRepositoryData = RepositoryData.snapshotsFromXContent(createParser(builder), repositoryData.getGenId()); + RepositoryData parsedRepositoryData; + try (XContentParser xParser = createParser(builder)) { + parsedRepositoryData = RepositoryData.snapshotsFromXContent(xParser, repositoryData.getGenId()); + } assertEquals(repositoryData, parsedRepositoryData); Map snapshotIds = new HashMap<>(); @@ -195,10 +199,12 @@ public void testIndexThatReferencesAnUnknownSnapshot() throws IOException { final XContentBuilder corruptedBuilder = XContentBuilder.builder(xContent); corruptedRepositoryData.snapshotsToXContent(corruptedBuilder, ToXContent.EMPTY_PARAMS); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> - RepositoryData.snapshotsFromXContent(createParser(corruptedBuilder), corruptedRepositoryData.getGenId())); - assertThat(e.getMessage(), equalTo("Detected a corrupted repository, index " + corruptedIndexId + " references an unknown " + - "snapshot uuid [_does_not_exist]")); + try (XContentParser xParser = createParser(corruptedBuilder)) { + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> + RepositoryData.snapshotsFromXContent(xParser, corruptedRepositoryData.getGenId())); + assertThat(e.getMessage(), equalTo("Detected a corrupted repository, index " + corruptedIndexId + " references an unknown " + + "snapshot uuid [_does_not_exist]")); + } } public void testIndexThatReferenceANullSnapshot() throws IOException { @@ -230,9 +236,12 @@ public void testIndexThatReferenceANullSnapshot() throws IOException { } builder.endObject(); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> - RepositoryData.snapshotsFromXContent(createParser(builder), 
randomNonNegativeLong())); - assertThat(e.getMessage(), equalTo("Detected a corrupted repository, index [docs/_id] references an unknown snapshot uuid [null]")); + try (XContentParser xParser = createParser(builder)) { + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> + RepositoryData.snapshotsFromXContent(xParser, randomNonNegativeLong())); + assertThat(e.getMessage(), equalTo("Detected a corrupted repository, " + + "index [docs/_id] references an unknown snapshot uuid [null]")); + } } public static RepositoryData generateRandomRepoData() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index 66f0bd796eaef..406e9b1d36c07 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -37,28 +37,29 @@ public class RestAnalyzeActionTests extends ESTestCase { public void testParseXContentForAnalyzeRequest() throws Exception { - XContentParser content = createParser(XContentFactory.jsonBuilder() + try (XContentParser content = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") .field("tokenizer", "keyword") .array("filter", "lowercase") - .endObject()); + .endObject())) { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - RestAnalyzeAction.buildFromContent(content, analyzeRequest); + RestAnalyzeAction.buildFromContent(content, analyzeRequest); - assertThat(analyzeRequest.text().length, equalTo(1)); - assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); - assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); - assertThat(analyzeRequest.tokenFilters().size(), equalTo(1)); - for (AnalyzeRequest.NameOrDefinition filter : analyzeRequest.tokenFilters()) { - assertThat(filter.name, equalTo("lowercase")); + assertThat(analyzeRequest.text().length, equalTo(1)); + assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); + assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); + assertThat(analyzeRequest.tokenFilters().size(), equalTo(1)); + for (AnalyzeRequest.NameOrDefinition filter : analyzeRequest.tokenFilters()) { + assertThat(filter.name, equalTo("lowercase")); + } } } public void testParseXContentForAnalyzeRequestWithCustomFilters() throws Exception { - XContentParser content = createParser(XContentFactory.jsonBuilder() + try (XContentParser content = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") .field("tokenizer", "keyword") @@ -76,21 +77,22 @@ public void testParseXContentForAnalyzeRequestWithCustomFilters() throws Excepti .endObject() .endArray() .field("normalizer", "normalizer") - .endObject()); + .endObject())) { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - RestAnalyzeAction.buildFromContent(content, analyzeRequest); - - assertThat(analyzeRequest.text().length, equalTo(1)); - assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); - assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); - assertThat(analyzeRequest.tokenFilters().size(), equalTo(2)); - assertThat(analyzeRequest.tokenFilters().get(0).name, 
equalTo("lowercase")); - assertThat(analyzeRequest.tokenFilters().get(1).definition, notNullValue()); - assertThat(analyzeRequest.charFilters().size(), equalTo(1)); - assertThat(analyzeRequest.charFilters().get(0).definition, notNullValue()); - assertThat(analyzeRequest.normalizer(), equalTo("normalizer")); + RestAnalyzeAction.buildFromContent(content, analyzeRequest); + + assertThat(analyzeRequest.text().length, equalTo(1)); + assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); + assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); + assertThat(analyzeRequest.tokenFilters().size(), equalTo(2)); + assertThat(analyzeRequest.tokenFilters().get(0).name, equalTo("lowercase")); + assertThat(analyzeRequest.tokenFilters().get(1).definition, notNullValue()); + assertThat(analyzeRequest.charFilters().size(), equalTo(1)); + assertThat(analyzeRequest.charFilters().get(0).definition, notNullValue()); + assertThat(analyzeRequest.normalizer(), equalTo("normalizer")); + } } public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { @@ -103,84 +105,83 @@ public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() t public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - XContentParser invalidContent = createParser(XContentFactory.jsonBuilder() + try (XContentParser invalidContent = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") .field("unknown", "keyword") - .endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + .endObject())) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest)); - assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); + assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); + } } public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() + try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("explain", "fals") - .endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); - assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); + .endObject())) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); + assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); + } } public void testParseXContentForAnalyzeRequestWithInvalidNormalizerThrowsException() throws Exception { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() + try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("normalizer", true) - .endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); - assertThat(e.getMessage(), startsWith("normalizer should be normalizer's name")); + 
.endObject())) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); + assertThat(e.getMessage(), startsWith("normalizer should be normalizer's name")); + } } public void testDeprecatedParamIn2xException() throws Exception { - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("filters", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("filters", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [filters]")); } - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("token_filters", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("token_filters", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]")); } - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("char_filters", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("char_filters", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]")); } - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("token_filter", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("token_filter", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]")); } } diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTests.java b/server/src/test/java/org/elasticsearch/script/ScriptTests.java index 6e578ed910d40..8b66bb32c486e 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptTests.java @@ -89,9 +89,11 @@ public void testParse() throws IOException { Script expectedScript = createScript(); try 
(XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()))) { expectedScript.toXContent(builder, ToXContent.EMPTY_PARAMS); - Settings settings = Settings.fromXContent(createParser(builder)); - Script actualScript = Script.parse(settings); - assertThat(actualScript, equalTo(expectedScript)); + try (XContentParser xParser = createParser(builder)) { + Settings settings = Settings.fromXContent(xParser); + Script actualScript = Script.parse(settings); + assertThat(actualScript, equalTo(expectedScript)); + } } } } diff --git a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java index b0eb9e907618f..b1c46f3bcedf4 100644 --- a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java +++ b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java @@ -58,10 +58,11 @@ public void testFromXContent() throws IOException { builder.prettyPrint(); } builder = nestedIdentity.innerToXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(builder); - NestedIdentity parsedNestedIdentity = NestedIdentity.fromXContent(parser); - assertEquals(nestedIdentity, parsedNestedIdentity); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(builder)) { + NestedIdentity parsedNestedIdentity = NestedIdentity.fromXContent(parser); + assertEquals(nestedIdentity, parsedNestedIdentity); + assertNull(parser.nextToken()); + } } public void testToXContent() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java index 9b5d64b46bc33..9919e9dcdbbd1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java @@ -56,13 +56,14 @@ public void testNeedsScores() throws Exception { } private boolean needsScores(IndexService index, String agg) throws IOException { - XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg); - aggParser.nextToken(); - SearchContext context = createSearchContext(index); - final AggregatorFactories factories = AggregatorFactories.parseAggregators(aggParser).build(context, null); - final Aggregator[] aggregators = factories.createTopLevelAggregators(); - assertEquals(1, aggregators.length); - return aggregators[0].needsScores(); + try (XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg)) { + aggParser.nextToken(); + SearchContext context = createSearchContext(index); + final AggregatorFactories factories = AggregatorFactories.parseAggregators(aggParser).build(context, null); + final Aggregator[] aggregators = factories.createTopLevelAggregators(); + assertEquals(1, aggregators.length); + return aggregators[0].needsScores(); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java index 828b419909238..c7bbcfc147780 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java @@ -106,13 +106,14 @@ public void testFromXContent() throws IOException { } 
factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - String contentString = factoriesBuilder.toString(); - logger.info("Content string: {}", contentString); - PipelineAggregationBuilder newAgg = parse(parser); - assertNotSame(newAgg, testAgg); - assertEquals(testAgg, newAgg); - assertEquals(testAgg.hashCode(), newAgg.hashCode()); + try (XContentParser parser = createParser(shuffled)) { + String contentString = factoriesBuilder.toString(); + logger.info("Content string: {}", contentString); + PipelineAggregationBuilder newAgg = parse(parser); + assertNotSame(newAgg, testAgg); + assertEquals(testAgg, newAgg); + assertEquals(testAgg.hashCode(), newAgg.hashCode()); + } } protected PipelineAggregationBuilder parse(XContentParser parser) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java index 4577986da270c..327a717f05c52 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java @@ -94,34 +94,37 @@ public void testOtherBucket() throws IOException { builder.startObject(); builder.startArray("filters").endArray(); builder.endObject(); - XContentParser parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); - // The other bucket is disabled by default - assertFalse(filters.otherBucket()); - - builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - builder.startObject(); - builder.startArray("filters").endArray(); - builder.field("other_bucket_key", "some_key"); - builder.endObject(); - parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - filters = FiltersAggregationBuilder.parse("agg_name", parser); - // but setting a key enables it automatically - assertTrue(filters.otherBucket()); - - builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - builder.startObject(); - builder.startArray("filters").endArray(); - builder.field("other_bucket", false); - builder.field("other_bucket_key", "some_key"); - builder.endObject(); - parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - filters = FiltersAggregationBuilder.parse("agg_name", parser); - // unless the other bucket is explicitly disabled - assertFalse(filters.otherBucket()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); + // The other bucket is disabled by default + assertFalse(filters.otherBucket()); + + builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject(); + builder.startArray("filters").endArray(); + builder.field("other_bucket_key", "some_key"); + builder.endObject(); + } + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); + // but setting a key enables it automatically + assertTrue(filters.otherBucket()); + + builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject(); + builder.startArray("filters").endArray(); + 
builder.field("other_bucket", false); + builder.field("other_bucket_key", "some_key"); + builder.endObject(); + } + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); + // unless the other bucket is explicitly disabled + assertFalse(filters.otherBucket()); + } } public void testRewrite() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 414954a2d905b..5009594160ef7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -264,9 +264,8 @@ public void testBuilderAndParser() throws Exception { protected void checkParseException(ParseFieldRegistry significanceHeuristicParserRegistry, String faultyHeuristicDefinition, String expectedError) throws IOException { - try { - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}"); + try (XContentParser stParser = createParser(JsonXContent.jsonXContent, + "{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}")) { stParser.nextToken(); SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry).parse("testagg", stParser); fail(); @@ -301,9 +300,10 @@ private static SignificanceHeuristic parseSignificanceHeuristic( protected SignificanceHeuristic parseFromString(ParseFieldRegistry significanceHeuristicParserRegistry, String heuristicString) throws IOException { - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}"); - return parseSignificanceHeuristic(significanceHeuristicParserRegistry, stParser); + try (XContentParser stParser = createParser(JsonXContent.jsonXContent, + "{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}")) { + return parseSignificanceHeuristic(significanceHeuristicParserRegistry, stParser); + } } void testBackgroundAssertions(SignificanceHeuristic heuristicIsSuperset, SignificanceHeuristic heuristicNotSuperset) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java index 6e477021a541f..dc2624dc39e40 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java @@ -214,21 +214,22 @@ private IncludeExclude serialize(IncludeExclude incExc, ParseField field) throws incExc.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(builder); - XContentParser.Token token = parser.nextToken(); - assertEquals(token, XContentParser.Token.START_OBJECT); - token = parser.nextToken(); - assertEquals(token, XContentParser.Token.FIELD_NAME); - assertEquals(field.getPreferredName(), parser.currentName()); - token = parser.nextToken(); - - if (field.getPreferredName().equalsIgnoreCase("include")) { - return IncludeExclude.parseInclude(parser); - } else if 
(field.getPreferredName().equalsIgnoreCase("exclude")) { - return IncludeExclude.parseExclude(parser); - } else { - throw new IllegalArgumentException( + try (XContentParser parser = createParser(builder)) { + XContentParser.Token token = parser.nextToken(); + assertEquals(token, XContentParser.Token.START_OBJECT); + token = parser.nextToken(); + assertEquals(token, XContentParser.Token.FIELD_NAME); + assertEquals(field.getPreferredName(), parser.currentName()); + token = parser.nextToken(); + + if (field.getPreferredName().equalsIgnoreCase("include")) { + return IncludeExclude.parseInclude(parser); + } else if (field.getPreferredName().equalsIgnoreCase("exclude")) { + return IncludeExclude.parseExclude(parser); + } else { + throw new IllegalArgumentException( "Unexpected field name serialized in test: " + field.getPreferredName()); + } } } @@ -260,28 +261,29 @@ private IncludeExclude serializeMixedRegex(IncludeExclude incExc) throws IOExcep incExc.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(builder); - XContentParser.Token token = parser.nextToken(); - assertEquals(token, XContentParser.Token.START_OBJECT); - - IncludeExclude inc = null; - IncludeExclude exc = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - assertEquals(XContentParser.Token.FIELD_NAME, token); - if (IncludeExclude.INCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { - token = parser.nextToken(); - inc = IncludeExclude.parseInclude(parser); - } else if (IncludeExclude.EXCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { - token = parser.nextToken(); - exc = IncludeExclude.parseExclude(parser); - } else { - throw new IllegalArgumentException("Unexpected field name serialized in test: " + parser.currentName()); + try (XContentParser parser = createParser(builder)) { + XContentParser.Token token = parser.nextToken(); + assertEquals(token, XContentParser.Token.START_OBJECT); + + IncludeExclude inc = null; + IncludeExclude exc = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + assertEquals(XContentParser.Token.FIELD_NAME, token); + if (IncludeExclude.INCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { + token = parser.nextToken(); + inc = IncludeExclude.parseInclude(parser); + } else if (IncludeExclude.EXCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { + token = parser.nextToken(); + exc = IncludeExclude.parseExclude(parser); + } else { + throw new IllegalArgumentException("Unexpected field name serialized in test: " + parser.currentName()); + } } + assertNotNull(inc); + assertNotNull(exc); + // Include and Exclude clauses are parsed independently and then merged + return IncludeExclude.merge(inc, exc); } - assertNotNull(inc); - assertNotNull(exc); - // Include and Exclude clauses are parsed independently and then merged - return IncludeExclude.merge(inc, exc); } } diff --git a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 2550c0a4a444c..12c3e487ff124 100644 --- a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -64,7 +64,9 @@ public void testFromXContent() throws IOException { builder.prettyPrint(); } testSearchSourceBuilder.toXContent(builder, 
ToXContent.EMPTY_PARAMS); - assertParseSearchSource(testSearchSourceBuilder, createParser(builder)); + try (XContentParser xParser = createParser(builder)) { + assertParseSearchSource(testSearchSourceBuilder, xParser); + } } public void testFromXContentInvalid() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 95da15e838c31..37359d9f20d71 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -139,17 +139,18 @@ public void testFromXContent() throws IOException { shuffled = shuffleXContent(builder, "fields"); } - XContentParser parser = createParser(shuffled); - parser.nextToken(); - HighlightBuilder secondHighlightBuilder; - try { - secondHighlightBuilder = HighlightBuilder.fromXContent(parser); - } catch (RuntimeException e) { - throw new RuntimeException("Error parsing " + highlightBuilder, e); + try (XContentParser parser = createParser(shuffled)) { + parser.nextToken(); + HighlightBuilder secondHighlightBuilder; + try { + secondHighlightBuilder = HighlightBuilder.fromXContent(parser); + } catch (RuntimeException e) { + throw new RuntimeException("Error parsing " + highlightBuilder, e); + } + assertNotSame(highlightBuilder, secondHighlightBuilder); + assertEquals(highlightBuilder, secondHighlightBuilder); + assertEquals(highlightBuilder.hashCode(), secondHighlightBuilder.hashCode()); } - assertNotSame(highlightBuilder, secondHighlightBuilder); - assertEquals(highlightBuilder, secondHighlightBuilder); - assertEquals(highlightBuilder.hashCode(), secondHighlightBuilder.hashCode()); } } @@ -179,8 +180,9 @@ public void testUnknownArrayNameExpection() throws IOException { } private T expectParseThrows(Class exceptionClass, String highlightElement) throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); - return expectThrows(exceptionClass, () -> HighlightBuilder.fromXContent(parser)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + return expectThrows(exceptionClass, () -> HighlightBuilder.fromXContent(parser)); + } } /** @@ -389,30 +391,32 @@ public void testParsingTagsSchema() throws IOException { String highlightElement = "{\n" + " \"tags_schema\" : \"styled\"\n" + "}\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { - HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); - assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.DEFAULT_STYLED_PRE_TAG, + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.DEFAULT_STYLED_PRE_TAG, highlightBuilder.preTags()); - assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.DEFAULT_STYLED_POST_TAGS, + assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.DEFAULT_STYLED_POST_TAGS, highlightBuilder.postTags()); - highlightElement = "{\n" + + highlightElement = "{\n" + " \"tags_schema\" : \"default\"\n" + "}\n"; - parser = 
createParser(JsonXContent.jsonXContent, highlightElement); + } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { - highlightBuilder = HighlightBuilder.fromXContent(parser); - assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlightBuilder.DEFAULT_PRE_TAGS, + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlightBuilder.DEFAULT_PRE_TAGS, highlightBuilder.preTags()); - assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlightBuilder.DEFAULT_POST_TAGS, + assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlightBuilder.DEFAULT_POST_TAGS, highlightBuilder.postTags()); - XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"tags_schema\" : \"somthing_else\"\n" + "}\n"); - assertThat(e.getMessage(), containsString("[highlight] failed to parse field [tags_schema]")); - assertEquals("Unknown tag schema [somthing_else]", e.getCause().getMessage()); + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [tags_schema]")); + assertEquals("Unknown tag schema [somthing_else]", e.getCause().getMessage()); + } } /** @@ -420,22 +424,22 @@ public void testParsingTagsSchema() throws IOException { */ public void testParsingEmptyStructure() throws IOException { String highlightElement = "{ }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); - - HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); - assertEquals("expected plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertEquals("expected plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + } highlightElement = "{ \"fields\" : { } }"; - parser = createParser(JsonXContent.jsonXContent, highlightElement); - - highlightBuilder = HighlightBuilder.fromXContent(parser); - assertEquals("defining no field should return plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertEquals("defining no field should return plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + } highlightElement = "{ \"fields\" : { \"foo\" : { } } }"; - parser = createParser(JsonXContent.jsonXContent, highlightElement); - - highlightBuilder = HighlightBuilder.fromXContent(parser); - assertEquals("expected HighlightBuilder with field", new HighlightBuilder().field(new Field("foo")), highlightBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertEquals("expected HighlightBuilder with field", new HighlightBuilder().field(new Field("foo")), highlightBuilder); + } } public void testPreTagsWithoutPostTags() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java index 
7b27cf78ec65a..7044a7b103098 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java @@ -62,16 +62,17 @@ public void testFromXContent() throws IOException { builder.startObject(); // we need to wrap xContent output in proper object to create a parser for it builder = highlightField.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(builder); - parser.nextToken(); // skip to the opening object token, fromXContent advances from here and starts with the field name - parser.nextToken(); - HighlightField parsedField = HighlightField.fromXContent(parser); - assertEquals(highlightField, parsedField); - if (highlightField.fragments() != null) { - assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + try (XContentParser parser = createParser(builder)) { + parser.nextToken(); // skip to the opening object token, fromXContent advances from here and starts with the field name + parser.nextToken(); + HighlightField parsedField = HighlightField.fromXContent(parser); + assertEquals(highlightField, parsedField); + if (highlightField.fragments() != null) { + assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + } + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); } - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); } public void testToXContent() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index efd3e5ef2ca06..700b3949facf4 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -121,12 +121,13 @@ public void testFromXContent() throws IOException { XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - parser.nextToken(); - RescorerBuilder secondRescoreBuilder = RescorerBuilder.parseFromXContent(parser); - assertNotSame(rescoreBuilder, secondRescoreBuilder); - assertEquals(rescoreBuilder, secondRescoreBuilder); - assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); + try (XContentParser parser = createParser(shuffled)) { + parser.nextToken(); + RescorerBuilder secondRescoreBuilder = RescorerBuilder.parseFromXContent(parser); + assertNotSame(rescoreBuilder, secondRescoreBuilder); + assertEquals(rescoreBuilder, secondRescoreBuilder); + assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); + } } } @@ -214,67 +215,61 @@ public MappedFieldType fieldMapper(String name) { public void testUnknownFieldsExpection() throws IOException { String rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"bad_rescorer_name\" : { }\n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"bad_rescorer_name\" : { }\n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(NamedObjectNotFoundException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("[3:27] unable to parse RescorerBuilder with name [bad_rescorer_name]: parser not found", e.getMessage()); } - rescoreElement = "{\n" 
+ - " \"bad_fieldName\" : 20\n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"bad_fieldName\" : 20\n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("rescore doesn't support [bad_fieldName]", e.getMessage()); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : [ ]\n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"query\" : [ ]\n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("unexpected token [START_ARRAY] after [query]", e.getMessage()); } rescoreElement = "{ }"; - { - XContentParser parser = createParser(rescoreElement); + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("missing rescore type", e.getMessage()); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : { \"bad_fieldname\" : 1.0 } \n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"query\" : { \"bad_fieldname\" : 1.0 } \n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { XContentParseException e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("[3:17] [query] unknown field [bad_fieldname], parser not found", e.getMessage()); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : { \"rescore_query\" : { \"unknown_queryname\" : { } } } \n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"query\" : { \"rescore_query\" : { \"unknown_queryname\" : { } } } \n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertThat(e.getMessage(), containsString("[query] failed to parse field [rescore_query]")); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : { \"rescore_query\" : { \"match_all\" : { } } } \n" - + "}\n"; - XContentParser parser = createParser(rescoreElement); - RescorerBuilder.parseFromXContent(parser); + " \"window_size\" : 20,\n" + + " \"query\" : { \"rescore_query\" : { \"match_all\" : { } } } \n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { + RescorerBuilder.parseFromXContent(parser); + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index 53bd9da2ff1de..f7457d965744a 100644 --- a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -136,11 +136,12 @@ private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException { } jsonBuilder.endArray(); jsonBuilder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(jsonBuilder)); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - return SearchAfterBuilder.fromXContent(parser); + try (XContentParser 
parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(jsonBuilder))) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + return SearchAfterBuilder.fromXContent(parser); + } } private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException { @@ -174,14 +175,15 @@ public void testFromXContent() throws Exception { builder.startObject(); searchAfterBuilder.innerToXContent(builder); builder.endObject(); - XContentParser parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.fromXContent(parser); - assertNotSame(searchAfterBuilder, secondSearchAfterBuilder); - assertEquals(searchAfterBuilder, secondSearchAfterBuilder); - assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.fromXContent(parser); + assertNotSame(searchAfterBuilder, secondSearchAfterBuilder); + assertEquals(searchAfterBuilder, secondSearchAfterBuilder); + assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index b93ebc1adde72..30ed0cb5ab5b5 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -306,11 +306,12 @@ public void testFromXContent() throws Exception { builder.startObject(); sliceBuilder.innerToXContent(builder); builder.endObject(); - XContentParser parser = createParser(shuffleXContent(builder)); - SliceBuilder secondSliceBuilder = SliceBuilder.fromXContent(parser); - assertNotSame(sliceBuilder, secondSliceBuilder); - assertEquals(sliceBuilder, secondSliceBuilder); - assertEquals(sliceBuilder.hashCode(), secondSliceBuilder.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + SliceBuilder secondSliceBuilder = SliceBuilder.fromXContent(parser); + assertNotSame(sliceBuilder, secondSliceBuilder); + assertEquals(sliceBuilder, secondSliceBuilder); + assertEquals(sliceBuilder.hashCode(), secondSliceBuilder.hashCode()); + } } public void testInvalidArguments() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index d05ddf4ee640e..2285af3ec46c0 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -121,21 +121,22 @@ public void testFromXContent() throws IOException { } testItem.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser itemParser = createParser(shuffled); - itemParser.nextToken(); - - /* - * filter out name of sort, or field name to sort on for element fieldSort - */ - itemParser.nextToken(); - String elementName = itemParser.currentName(); - itemParser.nextToken(); - - T parsedItem = fromXContent(itemParser, elementName); - assertNotSame(testItem, parsedItem); - assertEquals(testItem, parsedItem); - 
assertEquals(testItem.hashCode(), parsedItem.hashCode()); - assertWarnings(testItem); + try (XContentParser itemParser = createParser(shuffled)) { + itemParser.nextToken(); + + /* + * filter out name of sort, or field name to sort on for element fieldSort + */ + itemParser.nextToken(); + String elementName = itemParser.currentName(); + itemParser.nextToken(); + + T parsedItem = fromXContent(itemParser, elementName); + assertNotSame(testItem, parsedItem); + assertEquals(testItem, parsedItem); + assertEquals(testItem.hashCode(), parsedItem.hashCode()); + assertWarnings(testItem); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 6aceed996ccdc..268f4aeb26d65 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -304,14 +304,15 @@ public void testBuildNested() throws IOException { public void testUnknownOptionFails() throws IOException { String json = "{ \"post_date\" : {\"reverse\" : true} },\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, json); - // need to skip until parser is located on second START_OBJECT - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - - XContentParseException e = expectThrows(XContentParseException.class, () -> FieldSortBuilder.fromXContent(parser, "")); - assertEquals("[1:18] [field_sort] unknown field [reverse], parser not found", e.getMessage()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) { + // need to skip until parser is located on second START_OBJECT + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + + XContentParseException e = expectThrows(XContentParseException.class, () -> FieldSortBuilder.fromXContent(parser, "")); + assertEquals("[1:18] [field_sort] unknown field [reverse], parser not found", e.getMessage()); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index b70a87ea9860f..7ffedbf43ec2c 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -232,12 +232,13 @@ public void testSortModeSumIsRejectedInJSON() throws IOException { " \"distance_type\" : \"arc\",\n" + " \"mode\" : \"SUM\"\n" + "}"; - XContentParser itemParser = createParser(JsonXContent.jsonXContent, json); - itemParser.nextToken(); + try (XContentParser itemParser = createParser(JsonXContent.jsonXContent, json)) { + itemParser.nextToken(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> GeoDistanceSortBuilder.fromXContent(itemParser, "")); - assertEquals("sort_mode [sum] isn't supported for sorting by geo distance", e.getMessage()); + assertEquals("sort_mode [sum] isn't supported for sorting by geo distance", e.getMessage()); + } } public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException { @@ -258,16 +259,17 @@ public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException { " },\n" + " \"validation_method\" : \"STRICT\"\n" + " }"; - XContentParser itemParser = createParser(JsonXContent.jsonXContent, json); - 
itemParser.nextToken(); + try (XContentParser itemParser = createParser(JsonXContent.jsonXContent, json)) { + itemParser.nextToken(); - GeoDistanceSortBuilder result = GeoDistanceSortBuilder.fromXContent(itemParser, json); - assertEquals("[-19.700583312660456, -2.8225036337971687, " + GeoDistanceSortBuilder result = GeoDistanceSortBuilder.fromXContent(itemParser, json); + assertEquals("[-19.700583312660456, -2.8225036337971687, " + "31.537466906011105, -74.63590376079082, " + "43.71844606474042, -5.548660643398762, " + "-37.20467280596495, 38.71751043945551, " + "-69.44606635719538, 84.25200328230858, " + "-39.03717711567879, 44.74099852144718]", Arrays.toString(result.points())); + } } public void testGeoDistanceSortParserManyPointsNoException() throws Exception { @@ -380,9 +382,10 @@ public void testGeoDistanceSortDeprecatedSortModeException() throws Exception { } private GeoDistanceSortBuilder parse(XContentBuilder sortBuilder) throws Exception { - XContentParser parser = createParser(sortBuilder); - parser.nextToken(); - return GeoDistanceSortBuilder.fromXContent(parser, null); + try (XContentParser parser = createParser(sortBuilder)) { + parser.nextToken(); + return GeoDistanceSortBuilder.fromXContent(parser, null); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java index 0908d83896f92..b0613b320b86a 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java @@ -73,12 +73,13 @@ public void testFromXContent() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); testItem.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - parser.nextToken(); - NestedSortBuilder parsedItem = NestedSortBuilder.fromXContent(parser); - assertNotSame(testItem, parsedItem); - assertEquals(testItem, parsedItem); - assertEquals(testItem.hashCode(), parsedItem.hashCode()); + try (XContentParser parser = createParser(shuffled)) { + parser.nextToken(); + NestedSortBuilder parsedItem = NestedSortBuilder.fromXContent(parser); + assertNotSame(testItem, parsedItem); + assertEquals(testItem, parsedItem); + assertEquals(testItem.hashCode(), parsedItem.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 9a030cc3aabcb..0f19b709a4fed 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -177,20 +177,21 @@ public void testParseJson() throws IOException { "\"mode\" : \"max\",\n" + "\"order\" : \"asc\"\n" + "} }\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - - ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); - assertEquals("doc['field_name'].value * factor", builder.script().getIdOrCode()); - assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); - assertEquals(1.1, builder.script().getParams().get("factor")); - assertEquals(ScriptType.INLINE, builder.script().getType()); - 
assertEquals(ScriptSortType.NUMBER, builder.type()); - assertEquals(SortOrder.ASC, builder.order()); - assertEquals(SortMode.MAX, builder.sortMode()); - assertNull(builder.getNestedSort()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + + ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); + assertEquals("doc['field_name'].value * factor", builder.script().getIdOrCode()); + assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); + assertEquals(1.1, builder.script().getParams().get("factor")); + assertEquals(ScriptType.INLINE, builder.script().getType()); + assertEquals(ScriptSortType.NUMBER, builder.type()); + assertEquals(SortOrder.ASC, builder.order()); + assertEquals(SortMode.MAX, builder.sortMode()); + assertNull(builder.getNestedSort()); + } } public void testParseJson_simple() throws IOException { @@ -201,54 +202,58 @@ public void testParseJson_simple() throws IOException { "\"mode\" : \"max\",\n" + "\"order\" : \"asc\"\n" + "} }\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - - ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); - assertEquals("doc['field_name'].value", builder.script().getIdOrCode()); - assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); - assertEquals(builder.script().getParams(), Collections.emptyMap()); - assertEquals(ScriptType.INLINE, builder.script().getType()); - assertEquals(ScriptSortType.NUMBER, builder.type()); - assertEquals(SortOrder.ASC, builder.order()); - assertEquals(SortMode.MAX, builder.sortMode()); - assertNull(builder.getNestedSort()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + + ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); + assertEquals("doc['field_name'].value", builder.script().getIdOrCode()); + assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); + assertEquals(builder.script().getParams(), Collections.emptyMap()); + assertEquals(ScriptType.INLINE, builder.script().getType()); + assertEquals(ScriptSortType.NUMBER, builder.type()); + assertEquals(SortOrder.ASC, builder.order()); + assertEquals(SortMode.MAX, builder.sortMode()); + assertNull(builder.getNestedSort()); + } } public void testParseBadFieldNameExceptions() throws IOException { String scriptSort = "{\"_script\" : {" + "\"bad_field\" : \"number\"" + "} }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); - XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); + assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + } } public void testParseBadFieldNameExceptionsOnStartObject() throws IOException { String scriptSort = "{\"_script\" : {" + "\"bad_field\" : { \"order\" : \"asc\" } } 
}"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); - XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); + assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + } } public void testParseUnexpectedToken() throws IOException { String scriptSort = "{\"_script\" : {" + "\"script\" : [ \"order\" : \"asc\" ] } }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); - Exception e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertThat(e.getMessage(), containsString("[_script] script doesn't support values of type: START_ARRAY")); + Exception e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); + assertThat(e.getMessage(), containsString("[_script] script doesn't support values of type: START_ARRAY")); + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java index f267dec2a8623..5f5ea5e869450 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java @@ -252,12 +252,13 @@ protected NamedXContentRegistry xContentRegistry() { } private List> parseSort(String jsonString) throws IOException { - XContentParser itemParser = createParser(JsonXContent.jsonXContent, jsonString); + try (XContentParser itemParser = createParser(JsonXContent.jsonXContent, jsonString)) { - assertEquals(XContentParser.Token.START_OBJECT, itemParser.nextToken()); - assertEquals(XContentParser.Token.FIELD_NAME, itemParser.nextToken()); - assertEquals("sort", itemParser.currentName()); - itemParser.nextToken(); - return SortBuilder.fromXContent(itemParser); + assertEquals(XContentParser.Token.START_OBJECT, itemParser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, itemParser.nextToken()); + assertEquals("sort", itemParser.currentName()); + itemParser.nextToken(); + return SortBuilder.fromXContent(itemParser); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index eb31f19ad4e83..00a287f02528c 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -140,14 +140,15 @@ public void testFromXContent() throws IOException { xContentBuilder.endObject(); XContentBuilder shuffled = shuffleXContent(xContentBuilder, shuffleProtectedFields()); - XContentParser parser = createParser(shuffled); - 
// we need to skip the start object and the name, those will be parsed by outer SuggestBuilder - parser.nextToken(); + try (XContentParser parser = createParser(shuffled)) { + // we need to skip the start object and the name, those will be parsed by outer SuggestBuilder + parser.nextToken(); - SuggestionBuilder secondSuggestionBuilder = SuggestionBuilder.fromXContent(parser); - assertNotSame(suggestionBuilder, secondSuggestionBuilder); - assertEquals(suggestionBuilder, secondSuggestionBuilder); - assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode()); + SuggestionBuilder secondSuggestionBuilder = SuggestionBuilder.fromXContent(parser); + assertNotSame(suggestionBuilder, secondSuggestionBuilder); + assertEquals(suggestionBuilder, secondSuggestionBuilder); + assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java index 16de2a3506740..2b99c62185b7c 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java @@ -74,11 +74,12 @@ public void testFromXContent() throws IOException { xContentBuilder.prettyPrint(); } suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(xContentBuilder); - SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(parser); - assertNotSame(suggestBuilder, secondSuggestBuilder); - assertEquals(suggestBuilder, secondSuggestBuilder); - assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode()); + try (XContentParser parser = createParser(xContentBuilder)) { + SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(parser); + assertNotSame(suggestBuilder, secondSuggestBuilder); + assertEquals(suggestBuilder, secondSuggestBuilder); + assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 6ebced51e1ea1..3a7451e78fb4f 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -368,44 +368,48 @@ public void testIndexingWithMultipleContexts() throws Exception { public void testQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value("context1"); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), 
equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + } } public void testBooleanQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value(true); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("true")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("true")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + } } public void testNumberQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value(10); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("10")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("10")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + } } public void testNULLQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().nullValue(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + } } public void testQueryContextParsingArray() throws Exception { @@ -413,16 +417,17 @@ public void 
testQueryContextParsingArray() throws Exception { .value("context1") .value("context2") .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(2)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(1)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(2)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeValuesArray() throws Exception { @@ -432,22 +437,23 @@ public void testQueryContextParsingMixedTypeValuesArray() throws Exception { .value(true) .value(10) .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(4)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(1)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(2).context, equalTo("true")); - assertThat(internalQueryContexts.get(2).boost, equalTo(1)); - assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(3).context, equalTo("10")); - assertThat(internalQueryContexts.get(3).boost, equalTo(1)); - assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(4)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + 
assertThat(internalQueryContexts.get(2).context, equalTo("true")); + assertThat(internalQueryContexts.get(2).boost, equalTo(1)); + assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(3).context, equalTo("10")); + assertThat(internalQueryContexts.get(3).boost, equalTo(1)); + assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeValuesArrayHavingNULL() throws Exception { @@ -458,11 +464,12 @@ public void testQueryContextParsingMixedTypeValuesArrayHavingNULL() throws Excep .value(10) .nullValue() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + } } public void testQueryContextParsingObject() throws Exception { @@ -471,13 +478,14 @@ public void testQueryContextParsingObject() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(10)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(10)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + } } public void testQueryContextParsingObjectHavingBoolean() throws Exception { @@ -486,13 +494,14 @@ public void testQueryContextParsingObjectHavingBoolean() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("false")); - assertThat(internalQueryContexts.get(0).boost, equalTo(10)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping 
mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("false")); + assertThat(internalQueryContexts.get(0).boost, equalTo(10)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + } } public void testQueryContextParsingObjectHavingNumber() throws Exception { @@ -501,13 +510,14 @@ public void testQueryContextParsingObjectHavingNumber() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("333")); - assertThat(internalQueryContexts.get(0).boost, equalTo(10)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("333")); + assertThat(internalQueryContexts.get(0).boost, equalTo(10)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + } } public void testQueryContextParsingObjectHavingNULL() throws Exception { @@ -516,11 +526,12 @@ public void testQueryContextParsingObjectHavingNULL() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - Exception e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(e.getMessage(), containsString("category context must be a string, number or boolean")); + Exception e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(e.getMessage(), containsString("category context must be a string, number or boolean")); + } } public void testQueryContextParsingObjectArray() throws Exception { @@ -536,16 +547,17 @@ public void testQueryContextParsingObjectArray() throws Exception { .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(2)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(2)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(3)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + try (XContentParser parser = 
createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(2)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(3)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeObjectArray() throws Exception { @@ -571,22 +583,23 @@ public void testQueryContextParsingMixedTypeObjectArray() throws Exception { .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(4)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(2)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(3)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(2).context, equalTo("true")); - assertThat(internalQueryContexts.get(2).boost, equalTo(3)); - assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(3).context, equalTo("333")); - assertThat(internalQueryContexts.get(3).boost, equalTo(3)); - assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(4)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(3)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(2).context, equalTo("true")); + assertThat(internalQueryContexts.get(2).boost, equalTo(3)); + assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(3).context, equalTo("333")); + assertThat(internalQueryContexts.get(3).boost, equalTo(3)); + assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeObjectArrayHavingNULL() throws Exception { @@ -617,11 +630,12 @@ public void testQueryContextParsingMixedTypeObjectArrayHavingNULL() throws Excep .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try 
(XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be a string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be a string, number or boolean")); + } } @@ -640,22 +654,23 @@ public void testQueryContextParsingMixed() throws Exception { .field("prefix", true) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(4)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(2)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(1)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(2).context, equalTo("false")); - assertThat(internalQueryContexts.get(2).boost, equalTo(1)); - assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(3).context, equalTo("333")); - assertThat(internalQueryContexts.get(3).boost, equalTo(2)); - assertThat(internalQueryContexts.get(3).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(4)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(2).context, equalTo("false")); + assertThat(internalQueryContexts.get(2).boost, equalTo(1)); + assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(3).context, equalTo("333")); + assertThat(internalQueryContexts.get(3).boost, equalTo(2)); + assertThat(internalQueryContexts.get(3).isPrefix, equalTo(true)); + } } public void testQueryContextParsingMixedHavingNULL() throws Exception { @@ -674,11 +689,12 @@ public void testQueryContextParsingMixedHavingNULL() throws Exception { .endObject() .nullValue() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = 
ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + } } public void testUnknownQueryContextParsing() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index ebfac5f58ef77..925526323a540 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -124,12 +124,13 @@ public void testFromXContent() throws IOException { builder.prettyPrint(); } generator.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.PARSER.apply(parser, null); - assertNotSame(generator, secondGenerator); - assertEquals(generator, secondGenerator); - assertEquals(generator.hashCode(), secondGenerator.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.PARSER.apply(parser, null); + assertNotSame(generator, secondGenerator); + assertEquals(generator, secondGenerator); + assertEquals(generator.hashCode(), secondGenerator.hashCode()); + } } } @@ -187,9 +188,10 @@ public void testIllegalXContent() throws IOException { private void assertIllegalXContent(String directGenerator, Class exceptionClass, String exceptionMsg) throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, directGenerator); - Exception e = expectThrows(exceptionClass, () -> DirectCandidateGeneratorBuilder.PARSER.apply(parser, null)); - assertThat(e.getMessage(), containsString(exceptionMsg)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, directGenerator)) { + Exception e = expectThrows(exceptionClass, () -> DirectCandidateGeneratorBuilder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString(exceptionMsg)); + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index e75d01739ccb8..5923cd3332e5e 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -95,12 +95,13 @@ public void testFromXContent() throws IOException { contentBuilder.startObject(); testModel.innerToXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - XContentParser parser = createParser(shuffleXContent(contentBuilder)); - parser.nextToken(); // go to start token, real parsing would do that in the outer element parser - SmoothingModel parsedModel = fromXContent(parser); - assertNotSame(testModel, parsedModel); - 
assertEquals(testModel, parsedModel); - assertEquals(testModel.hashCode(), parsedModel.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(contentBuilder))) { + parser.nextToken(); // go to start token, real parsing would do that in the outer element parser + SmoothingModel parsedModel = fromXContent(parser); + assertNotSame(testModel, parsedModel); + assertEquals(testModel, parsedModel); + assertEquals(testModel.hashCode(), parsedModel.hashCode()); + } } /** From 8b698f0bce8c73cb6d8510ce6a3f857cb0fb2f02 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Mon, 25 Jun 2018 10:12:31 +0200 Subject: [PATCH 90/92] turn GetFieldMappingsResponse to ToXContentObject (#31544) --- .../admin/indices/mapping/get/GetFieldMappingsResponse.java | 5 ++++- .../rest/action/admin/indices/RestGetFieldMappingAction.java | 2 -- .../indices/mapping/SimpleGetFieldMappingsIT.java | 4 ---- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index 81b9812d61c5f..44a66f497c846 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -47,7 +48,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; /** Response object for {@link GetFieldMappingsRequest} API */ -public class GetFieldMappingsResponse extends ActionResponse implements ToXContentFragment { +public class GetFieldMappingsResponse extends ActionResponse implements ToXContentObject { private static final ParseField MAPPINGS = new ParseField("mappings"); @@ -111,6 +112,7 @@ public FieldMappingMetaData fieldMappings(String index, String type, String fiel @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); for (Map.Entry>> indexEntry : mappings.entrySet()) { builder.startObject(indexEntry.getKey()); builder.startObject(MAPPINGS.getPreferredName()); @@ -126,6 +128,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); builder.endObject(); } + builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index ea68d9cc3c04f..c43f14dcddf26 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -81,9 +81,7 @@ public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBui if (mappingsByIndex.isEmpty() && fields.length > 0) { status = NOT_FOUND; } - builder.startObject(); response.toXContent(builder, request); - 
builder.endObject(); return new BytesRestResponse(status, builder); } }); diff --git a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java index f5cac445b220d..2ba943ba0dc4b 100644 --- a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -149,9 +149,7 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { params.put("pretty", "true"); GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get(); XContentBuilder responseBuilder = XContentFactory.jsonBuilder().prettyPrint(); - responseBuilder.startObject(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); - responseBuilder.endObject(); String responseStrings = Strings.toString(responseBuilder); @@ -163,9 +161,7 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get(); responseBuilder = XContentFactory.jsonBuilder().prettyPrint().lfAtEnd(); - responseBuilder.startObject(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); - responseBuilder.endObject(); responseStrings = Strings.toString(responseBuilder); prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint(); From 8e4768890abbd2fb6fbc49d6188909ed7ca6961b Mon Sep 17 00:00:00 2001 From: Jonathan Little Date: Mon, 25 Jun 2018 04:01:33 -0700 Subject: [PATCH 91/92] Migrate scripted metric aggregation scripts to ScriptContext design (#30111) * Migrate scripted metric aggregation scripts to ScriptContext design #29328 * Rename new script context container class and add clarifying comments to remaining references to params._agg(s) * Misc cleanup: make mock metric agg script inner classes static * Move _score to an accessor rather than an arg for scripted metric agg scripts This causes the score to be evaluated only when it's used. 
* Documentation changes for params._agg -> agg * Migration doc addition for scripted metric aggs _agg object change * Rename "agg" Scripted Metric Aggregation script context variable to "state" * Rename a private base class from ...Agg to ...State that I missed in my last commit * Clean up imports after merge --- .../scripted-metric-aggregation.asciidoc | 18 +- .../scripted-metric-aggregation.asciidoc | 64 +++---- .../migrate_7_0/aggregations.asciidoc | 10 +- .../ScriptedMetricAggContextsTests.java | 126 ++++++++++++++ .../elasticsearch/script/ScriptModule.java | 6 +- .../script/ScriptedMetricAggContexts.java | 161 ++++++++++++++++++ .../scripted/InternalScriptedMetric.java | 19 ++- .../ScriptedMetricAggregationBuilder.java | 23 +-- .../scripted/ScriptedMetricAggregator.java | 36 ++-- .../ScriptedMetricAggregatorFactory.java | 38 +++-- .../metrics/ScriptedMetricIT.java | 82 ++++++++- .../ScriptedMetricAggregatorTests.java | 3 +- .../script/MockScriptEngine.java | 144 +++++++++++++++- 13 files changed, 619 insertions(+), 111 deletions(-) create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java create mode 100644 server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java diff --git a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc index b23a683b05610..5b68fa7be451f 100644 --- a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -13,8 +13,8 @@ Here is an example on how to create the aggregation request: -------------------------------------------------- ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript(new Script("params._agg.heights = []")) - .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")); + .initScript(new Script("state.heights = []")) + .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")); -------------------------------------------------- You can also specify a `combine` script which will be executed on each shard: @@ -23,9 +23,9 @@ You can also specify a `combine` script which will be executed on each shard: -------------------------------------------------- ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript(new Script("params._agg.heights = []")) - .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")) - .combineScript(new Script("double heights_sum = 0.0; for (t in params._agg.heights) { heights_sum += t } return heights_sum")); + .initScript(new Script("state.heights = []")) + .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? 
doc.height.value : -1.0 * doc.height.value)")) + .combineScript(new Script("double heights_sum = 0.0; for (t in state.heights) { heights_sum += t } return heights_sum")); -------------------------------------------------- You can also specify a `reduce` script which will be executed on the node which gets the request: @@ -34,10 +34,10 @@ You can also specify a `reduce` script which will be executed on the node which -------------------------------------------------- ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript(new Script("params._agg.heights = []")) - .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")) - .combineScript(new Script("double heights_sum = 0.0; for (t in params._agg.heights) { heights_sum += t } return heights_sum")) - .reduceScript(new Script("double heights_sum = 0.0; for (a in params._aggs) { heights_sum += a } return heights_sum")); + .initScript(new Script("state.heights = []")) + .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")) + .combineScript(new Script("double heights_sum = 0.0; for (t in state.heights) { heights_sum += t } return heights_sum")) + .reduceScript(new Script("double heights_sum = 0.0; for (a in states) { heights_sum += a } return heights_sum")); -------------------------------------------------- diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index 1a4d6d4774c49..c4857699f9805 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -15,10 +15,10 @@ POST ledger/_search?size=0 "aggs": { "profit": { "scripted_metric": { - "init_script" : "params._agg.transactions = []", - "map_script" : "params._agg.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)", <1> - "combine_script" : "double profit = 0; for (t in params._agg.transactions) { profit += t } return profit", - "reduce_script" : "double profit = 0; for (a in params._aggs) { profit += a } return profit" + "init_script" : "state.transactions = []", + "map_script" : "state.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)", <1> + "combine_script" : "double profit = 0; for (t in state.transactions) { profit += t } return profit", + "reduce_script" : "double profit = 0; for (a in states) { profit += a } return profit" } } } @@ -67,8 +67,7 @@ POST ledger/_search?size=0 "id": "my_combine_script" }, "params": { - "field": "amount", <1> - "_agg": {} <2> + "field": "amount" <1> }, "reduce_script" : { "id": "my_reduce_script" @@ -82,8 +81,7 @@ POST ledger/_search?size=0 // TEST[setup:ledger,stored_scripted_metric_script] <1> script parameters for `init`, `map` and `combine` scripts must be specified -in a global `params` object so that it can be share between the scripts. -<2> if you specify script parameters then you must specify `"_agg": {}`. +in a global `params` object so that it can be shared between the scripts. //// Verify this response as well but in a hidden block. 
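For readers tracking the rename from the Java API side, here is a minimal sketch of the same ledger aggregation expressed with `AggregationBuilders`. It is not part of this patch; it simply restates the request above with the new `state`/`states` variable names (the index `ledger` and the aggregation name `profit` come from the reference example; the local variable name is illustrative).

[source,java]
--------------------------------------------------
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;

// Same profit calculation as the ledger example above, built through the Java API.
// The scripts now refer to "state" (one object per shard) and "states" (the per-shard
// results handed to the reduce phase) instead of params._agg / params._aggs.
ScriptedMetricAggregationBuilder profit = AggregationBuilders
    .scriptedMetric("profit")
    .initScript(new Script("state.transactions = []"))
    .mapScript(new Script(
        "state.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)"))
    .combineScript(new Script(
        "double profit = 0; for (t in state.transactions) { profit += t } return profit"))
    .reduceScript(new Script(
        "double profit = 0; for (a in states) { profit += a } return profit"));
--------------------------------------------------

The request shape is unchanged; only the script bodies differ from the previous `params._agg`/`params._aggs` form covered in the migration notes.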
@@ -108,7 +106,7 @@ For more details on specifying scripts see <, List> scriptContexts() { + Map, List> contexts = new HashMap<>(); + contexts.put(ScriptedMetricAggContexts.InitScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.MapScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.CombineScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.ReduceScript.CONTEXT, Whitelist.BASE_WHITELISTS); + return contexts; + } + + public void testInitBasic() { + ScriptedMetricAggContexts.InitScript.Factory factory = scriptEngine.compile("test", + "state.testField = params.initialVal", ScriptedMetricAggContexts.InitScript.CONTEXT, Collections.emptyMap()); + + Map params = new HashMap<>(); + Map state = new HashMap<>(); + + params.put("initialVal", 10); + + ScriptedMetricAggContexts.InitScript script = factory.newInstance(params, state); + script.execute(); + + assert(state.containsKey("testField")); + assertEquals(10, state.get("testField")); + } + + public void testMapBasic() { + ScriptedMetricAggContexts.MapScript.Factory factory = scriptEngine.compile("test", + "state.testField = 2*_score", ScriptedMetricAggContexts.MapScript.CONTEXT, Collections.emptyMap()); + + Map params = new HashMap<>(); + Map state = new HashMap<>(); + + Scorer scorer = new Scorer(null) { + @Override + public int docID() { return 0; } + + @Override + public float score() { return 0.5f; } + + @Override + public DocIdSetIterator iterator() { return null; } + }; + + ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, null); + ScriptedMetricAggContexts.MapScript script = leafFactory.newInstance(null); + + script.setScorer(scorer); + script.execute(); + + assert(state.containsKey("testField")); + assertEquals(1.0, state.get("testField")); + } + + public void testCombineBasic() { + ScriptedMetricAggContexts.CombineScript.Factory factory = scriptEngine.compile("test", + "state.testField = params.initialVal; return state.testField + params.inc", ScriptedMetricAggContexts.CombineScript.CONTEXT, + Collections.emptyMap()); + + Map params = new HashMap<>(); + Map state = new HashMap<>(); + + params.put("initialVal", 10); + params.put("inc", 2); + + ScriptedMetricAggContexts.CombineScript script = factory.newInstance(params, state); + Object res = script.execute(); + + assert(state.containsKey("testField")); + assertEquals(10, state.get("testField")); + assertEquals(12, res); + } + + public void testReduceBasic() { + ScriptedMetricAggContexts.ReduceScript.Factory factory = scriptEngine.compile("test", + "states[0].testField + states[1].testField", ScriptedMetricAggContexts.ReduceScript.CONTEXT, Collections.emptyMap()); + + Map params = new HashMap<>(); + List states = new ArrayList<>(); + + Map state1 = new HashMap<>(), state2 = new HashMap<>(); + state1.put("testField", 1); + state2.put("testField", 2); + + states.add(state1); + states.add(state2); + + ScriptedMetricAggContexts.ReduceScript script = factory.newInstance(params, states); + Object res = script.execute(); + assertEquals(3, res); + } +} diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index 7074d3ad9fe44..f0e075eac7d93 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -53,7 +53,11 @@ public class ScriptModule { SimilarityScript.CONTEXT, 
SimilarityWeightScript.CONTEXT, TemplateScript.CONTEXT, - MovingFunctionScript.CONTEXT + MovingFunctionScript.CONTEXT, + ScriptedMetricAggContexts.InitScript.CONTEXT, + ScriptedMetricAggContexts.MapScript.CONTEXT, + ScriptedMetricAggContexts.CombineScript.CONTEXT, + ScriptedMetricAggContexts.ReduceScript.CONTEXT ).collect(Collectors.toMap(c -> c.name, Function.identity())); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java new file mode 100644 index 0000000000000..774dc95d39977 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java @@ -0,0 +1,161 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorer; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.search.lookup.LeafSearchLookup; +import org.elasticsearch.search.lookup.SearchLookup; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class ScriptedMetricAggContexts { + private abstract static class ParamsAndStateBase { + private final Map params; + private final Object state; + + ParamsAndStateBase(Map params, Object state) { + this.params = params; + this.state = state; + } + + public Map getParams() { + return params; + } + + public Object getState() { + return state; + } + } + + public abstract static class InitScript extends ParamsAndStateBase { + public InitScript(Map params, Object state) { + super(params, state); + } + + public abstract void execute(); + + public interface Factory { + InitScript newInstance(Map params, Object state); + } + + public static String[] PARAMETERS = {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_init", Factory.class); + } + + public abstract static class MapScript extends ParamsAndStateBase { + private final LeafSearchLookup leafLookup; + private Scorer scorer; + + public MapScript(Map params, Object state, SearchLookup lookup, LeafReaderContext leafContext) { + super(params, state); + + this.leafLookup = leafContext == null ? null : lookup.getLeafSearchLookup(leafContext); + } + + // Return the doc as a map (instead of LeafDocLookup) in order to abide by type whitelisting rules for + // Painless scripts. + public Map> getDoc() { + return leafLookup == null ? 
null : leafLookup.doc(); + } + + public void setDocument(int docId) { + if (leafLookup != null) { + leafLookup.setDocument(docId); + } + } + + public void setScorer(Scorer scorer) { + this.scorer = scorer; + } + + // get_score() is named this way so that it's picked up by Painless as '_score' + public double get_score() { + if (scorer == null) { + return 0.0; + } + + try { + return scorer.score(); + } catch (IOException e) { + throw new ElasticsearchException("Couldn't look up score", e); + } + } + + public abstract void execute(); + + public interface LeafFactory { + MapScript newInstance(LeafReaderContext ctx); + } + + public interface Factory { + LeafFactory newFactory(Map params, Object state, SearchLookup lookup); + } + + public static String[] PARAMETERS = new String[] {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_map", Factory.class); + } + + public abstract static class CombineScript extends ParamsAndStateBase { + public CombineScript(Map params, Object state) { + super(params, state); + } + + public abstract Object execute(); + + public interface Factory { + CombineScript newInstance(Map params, Object state); + } + + public static String[] PARAMETERS = {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_combine", Factory.class); + } + + public abstract static class ReduceScript { + private final Map params; + private final List states; + + public ReduceScript(Map params, List states) { + this.params = params; + this.states = states; + } + + public Map getParams() { + return params; + } + + public List getStates() { + return states; + } + + public abstract Object execute(); + + public interface Factory { + ReduceScript newInstance(Map params, List states); + } + + public static String[] PARAMETERS = {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_reduce", Factory.class); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java index e350ecbed5814..f4281c063ff2c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -90,16 +90,19 @@ public InternalAggregation doReduce(List aggregations, Redu InternalScriptedMetric firstAggregation = ((InternalScriptedMetric) aggregations.get(0)); List aggregation; if (firstAggregation.reduceScript != null && reduceContext.isFinalReduce()) { - Map vars = new HashMap<>(); - vars.put("_aggs", aggregationObjects); + Map params = new HashMap<>(); if (firstAggregation.reduceScript.getParams() != null) { - vars.putAll(firstAggregation.reduceScript.getParams()); + params.putAll(firstAggregation.reduceScript.getParams()); } - ExecutableScript.Factory factory = reduceContext.scriptService().compile( - firstAggregation.reduceScript, ExecutableScript.AGGS_CONTEXT); - ExecutableScript 
script = factory.newInstance(vars); - Object scriptResult = script.run(); + // Add _aggs to params map for backwards compatibility (redundant with a context variable on the ReduceScript created below). + params.put("_aggs", aggregationObjects); + + ScriptedMetricAggContexts.ReduceScript.Factory factory = reduceContext.scriptService().compile( + firstAggregation.reduceScript, ScriptedMetricAggContexts.ReduceScript.CONTEXT); + ScriptedMetricAggContexts.ReduceScript script = factory.newInstance(params, aggregationObjects); + + Object scriptResult = script.execute(); CollectionUtils.ensureNoSelfReferences(scriptResult, "reduce script"); aggregation = Collections.singletonList(scriptResult); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java index 225398e51b7c0..8b6d834184d73 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java @@ -26,9 +26,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -202,30 +201,32 @@ protected ScriptedMetricAggregatorFactory doBuild(SearchContext context, Aggrega // Extract params from scripts and pass them along to ScriptedMetricAggregatorFactory, since it won't have // access to them for the scripts it's given precompiled. 
- ExecutableScript.Factory executableInitScript; + ScriptedMetricAggContexts.InitScript.Factory compiledInitScript; Map initScriptParams; if (initScript != null) { - executableInitScript = queryShardContext.getScriptService().compile(initScript, ExecutableScript.AGGS_CONTEXT); + compiledInitScript = queryShardContext.getScriptService().compile(initScript, ScriptedMetricAggContexts.InitScript.CONTEXT); initScriptParams = initScript.getParams(); } else { - executableInitScript = p -> null; + compiledInitScript = (p, a) -> null; initScriptParams = Collections.emptyMap(); } - SearchScript.Factory searchMapScript = queryShardContext.getScriptService().compile(mapScript, SearchScript.AGGS_CONTEXT); + ScriptedMetricAggContexts.MapScript.Factory compiledMapScript = queryShardContext.getScriptService().compile(mapScript, + ScriptedMetricAggContexts.MapScript.CONTEXT); Map mapScriptParams = mapScript.getParams(); - ExecutableScript.Factory executableCombineScript; + ScriptedMetricAggContexts.CombineScript.Factory compiledCombineScript; Map combineScriptParams; if (combineScript != null) { - executableCombineScript = queryShardContext.getScriptService().compile(combineScript, ExecutableScript.AGGS_CONTEXT); + compiledCombineScript = queryShardContext.getScriptService().compile(combineScript, + ScriptedMetricAggContexts.CombineScript.CONTEXT); combineScriptParams = combineScript.getParams(); } else { - executableCombineScript = p -> null; + compiledCombineScript = (p, a) -> null; combineScriptParams = Collections.emptyMap(); } - return new ScriptedMetricAggregatorFactory(name, searchMapScript, mapScriptParams, executableInitScript, initScriptParams, - executableCombineScript, combineScriptParams, reduceScript, + return new ScriptedMetricAggregatorFactory(name, compiledMapScript, mapScriptParams, compiledInitScript, + initScriptParams, compiledCombineScript, combineScriptParams, reduceScript, params, queryShardContext.lookup(), context, parent, subfactoriesBuilder, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java index d6e861a9a6792..ffdff44b783b6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java @@ -20,10 +20,10 @@ package org.elasticsearch.search.aggregations.metrics.scripted; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorer; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; @@ -38,17 +38,17 @@ public class ScriptedMetricAggregator extends MetricsAggregator { - private final SearchScript.LeafFactory mapScript; - private final ExecutableScript combineScript; + private final ScriptedMetricAggContexts.MapScript.LeafFactory mapScript; + private final ScriptedMetricAggContexts.CombineScript combineScript; private final Script reduceScript; - private Map params; + private Object aggState; - protected 
ScriptedMetricAggregator(String name, SearchScript.LeafFactory mapScript, ExecutableScript combineScript, - Script reduceScript, - Map params, SearchContext context, Aggregator parent, List pipelineAggregators, Map metaData) - throws IOException { + protected ScriptedMetricAggregator(String name, ScriptedMetricAggContexts.MapScript.LeafFactory mapScript, ScriptedMetricAggContexts.CombineScript combineScript, + Script reduceScript, Object aggState, SearchContext context, Aggregator parent, + List pipelineAggregators, Map metaData) + throws IOException { super(name, context, parent, pipelineAggregators, metaData); - this.params = params; + this.aggState = aggState; this.mapScript = mapScript; this.combineScript = combineScript; this.reduceScript = reduceScript; @@ -62,14 +62,20 @@ public boolean needsScores() { @Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { - final SearchScript leafMapScript = mapScript.newInstance(ctx); + final ScriptedMetricAggContexts.MapScript leafMapScript = mapScript.newInstance(ctx); return new LeafBucketCollectorBase(sub, leafMapScript) { + @Override + public void setScorer(Scorer scorer) throws IOException { + leafMapScript.setScorer(scorer); + } + @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0 : bucket; + leafMapScript.setDocument(doc); - leafMapScript.run(); - CollectionUtils.ensureNoSelfReferences(params, "Scripted metric aggs map script"); + leafMapScript.execute(); + CollectionUtils.ensureNoSelfReferences(aggState, "Scripted metric aggs map script"); } }; } @@ -78,10 +84,10 @@ public void collect(int doc, long bucket) throws IOException { public InternalAggregation buildAggregation(long owningBucketOrdinal) { Object aggregation; if (combineScript != null) { - aggregation = combineScript.run(); + aggregation = combineScript.execute(); CollectionUtils.ensureNoSelfReferences(aggregation, "Scripted metric aggs combine script"); } else { - aggregation = params.get("_agg"); + aggregation = aggState; } return new InternalScriptedMetric(name, aggregation, reduceScript, pipelineAggregators(), metaData()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java index 0deda32e79d77..9bd904a07013d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java @@ -19,10 +19,9 @@ package org.elasticsearch.search.aggregations.metrics.scripted; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -39,20 +38,21 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory { - private final SearchScript.Factory mapScript; + private final ScriptedMetricAggContexts.MapScript.Factory mapScript; private final Map mapScriptParams; - private final ExecutableScript.Factory combineScript; + private final 
ScriptedMetricAggContexts.CombineScript.Factory combineScript; private final Map combineScriptParams; private final Script reduceScript; private final Map aggParams; private final SearchLookup lookup; - private final ExecutableScript.Factory initScript; + private final ScriptedMetricAggContexts.InitScript.Factory initScript; private final Map initScriptParams; - public ScriptedMetricAggregatorFactory(String name, SearchScript.Factory mapScript, Map mapScriptParams, - ExecutableScript.Factory initScript, Map initScriptParams, - ExecutableScript.Factory combineScript, Map combineScriptParams, - Script reduceScript, Map aggParams, + public ScriptedMetricAggregatorFactory(String name, + ScriptedMetricAggContexts.MapScript.Factory mapScript, Map mapScriptParams, + ScriptedMetricAggContexts.InitScript.Factory initScript, Map initScriptParams, + ScriptedMetricAggContexts.CombineScript.Factory combineScript, + Map combineScriptParams, Script reduceScript, Map aggParams, SearchLookup lookup, SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactories, Map metaData) throws IOException { super(name, context, parent, subFactories, metaData); @@ -79,21 +79,29 @@ public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBu } else { aggParams = new HashMap<>(); } + + // Add _agg to params map for backwards compatibility (redundant with context variables on the scripts created below). + // When this is removed, aggState (as passed to ScriptedMetricAggregator) can be changed to Map, since + // it won't be possible to completely replace it with another type as is possible when it's an entry in params. if (aggParams.containsKey("_agg") == false) { aggParams.put("_agg", new HashMap()); } + Object aggState = aggParams.get("_agg"); - final ExecutableScript initScript = this.initScript.newInstance(mergeParams(aggParams, initScriptParams)); - final SearchScript.LeafFactory mapScript = this.mapScript.newFactory(mergeParams(aggParams, mapScriptParams), lookup); - final ExecutableScript combineScript = this.combineScript.newInstance(mergeParams(aggParams, combineScriptParams)); + final ScriptedMetricAggContexts.InitScript initScript = this.initScript.newInstance( + mergeParams(aggParams, initScriptParams), aggState); + final ScriptedMetricAggContexts.MapScript.LeafFactory mapScript = this.mapScript.newFactory( + mergeParams(aggParams, mapScriptParams), aggState, lookup); + final ScriptedMetricAggContexts.CombineScript combineScript = this.combineScript.newInstance( + mergeParams(aggParams, combineScriptParams), aggState); final Script reduceScript = deepCopyScript(this.reduceScript, context); if (initScript != null) { - initScript.run(); - CollectionUtils.ensureNoSelfReferences(aggParams.get("_agg"), "Scripted metric aggs init script"); + initScript.execute(); + CollectionUtils.ensureNoSelfReferences(aggState, "Scripted metric aggs init script"); } return new ScriptedMetricAggregator(name, mapScript, - combineScript, reduceScript, aggParams, context, parent, + combineScript, reduceScript, aggState, context, parent, pipelineAggregators, metaData); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 816c0464d95d9..13e1489795996 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java 
@@ -193,14 +193,55 @@ protected Map, Object>> pluginScripts() { return newAggregation; }); + scripts.put("state.items = new ArrayList()", vars -> + aggContextScript(vars, state -> ((HashMap) state).put("items", new ArrayList()))); + + scripts.put("state.items.add(1)", vars -> + aggContextScript(vars, state -> { + HashMap stateMap = (HashMap) state; + List items = (List) stateMap.get("items"); + items.add(1); + })); + + scripts.put("sum context state values", vars -> { + int sum = 0; + HashMap state = (HashMap) vars.get("state"); + List items = (List) state.get("items"); + + for (Object x : items) { + sum += (Integer)x; + } + + return sum; + }); + + scripts.put("sum context states", vars -> { + Integer sum = 0; + + List states = (List) vars.get("states"); + for (Object state : states) { + sum += ((Number) state).intValue(); + } + + return sum; + }); + return scripts; } - @SuppressWarnings("unchecked") static Object aggScript(Map vars, Consumer fn) { - T agg = (T) vars.get("_agg"); - fn.accept(agg); - return agg; + return aggScript(vars, fn, "_agg"); + } + + static Object aggContextScript(Map vars, Consumer fn) { + return aggScript(vars, fn, "state"); + } + + @SuppressWarnings("unchecked") + private static Object aggScript(Map vars, Consumer fn, String stateVarName) { + T aggState = (T) vars.get(stateVarName); + fn.accept(aggState); + return aggState; } } @@ -1015,4 +1056,37 @@ public void testConflictingAggAndScriptParams() { SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, builder::get); assertThat(ex.getCause().getMessage(), containsString("Parameter name \"param1\" used in both aggregation and script parameters")); } + + public void testAggFromContext() { + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items = new ArrayList()", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items.add(1)", Collections.emptyMap()); + Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context state values", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context states", + Collections.emptyMap()); + + SearchResponse response = client() + .prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted") + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript)) + .get(); + + Aggregation aggregation = response.getAggregations().get("scripted"); + assertThat(aggregation, notNullValue()); + assertThat(aggregation, instanceOf(ScriptedMetric.class)); + + ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation; + assertThat(scriptedMetricAggregation.getName(), equalTo("scripted")); + assertThat(scriptedMetricAggregation.aggregation(), notNullValue()); + + assertThat(scriptedMetricAggregation.aggregation(), instanceOf(Integer.class)); + Integer aggResult = (Integer) scriptedMetricAggregation.aggregation(); + long totalAgg = aggResult.longValue(); + assertThat(totalAgg, equalTo(numDocs)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java index 7a7c66d21aada..b2a949ceeee1a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.script.MockScriptEngine; -import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptModule; @@ -107,7 +106,7 @@ public static void initMockScripts() { }); SCRIPTS.put("mapScriptScore", params -> { Map agg = (Map) params.get("_agg"); - ((List) agg.get("collector")).add(((ScoreAccessor) params.get("_score")).doubleValue()); + ((List) agg.get("collector")).add(((Number) params.get("_score")).doubleValue()); return agg; }); SCRIPTS.put("combineScriptScore", params -> { diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index b86cb9ff29352..e608bd13d2559 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.function.Function; @@ -115,6 +116,18 @@ public String execute() { } else if (context.instanceClazz.equals(ScoreScript.class)) { ScoreScript.Factory factory = new MockScoreScript(script); return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.InitScript.class)) { + ScriptedMetricAggContexts.InitScript.Factory factory = mockCompiled::createMetricAggInitScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.MapScript.class)) { + ScriptedMetricAggContexts.MapScript.Factory factory = mockCompiled::createMetricAggMapScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.CombineScript.class)) { + ScriptedMetricAggContexts.CombineScript.Factory factory = mockCompiled::createMetricAggCombineScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.ReduceScript.class)) { + ScriptedMetricAggContexts.ReduceScript.Factory factory = mockCompiled::createMetricAggReduceScript; + return context.factoryClazz.cast(factory); } throw new IllegalArgumentException("mock script engine does not know how to handle context [" + context.name + "]"); } @@ -179,6 +192,23 @@ public SimilarityWeightScript createSimilarityWeightScript() { public MovingFunctionScript createMovingFunctionScript() { return new MockMovingFunctionScript(); } + + public ScriptedMetricAggContexts.InitScript createMetricAggInitScript(Map params, Object state) { + return new MockMetricAggInitScript(params, state, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.MapScript.LeafFactory createMetricAggMapScript(Map params, Object state, + SearchLookup lookup) { + return new MockMetricAggMapScript(params, state, lookup, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.CombineScript createMetricAggCombineScript(Map params, Object state) { + return new MockMetricAggCombineScript(params, state, script != null ? 
script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.ReduceScript createMetricAggReduceScript(Map params, List states) { + return new MockMetricAggReduceScript(params, states, script != null ? script : ctx -> 42d); + } } public class MockExecutableScript implements ExecutableScript { @@ -333,6 +363,108 @@ public double execute(Query query, Field field, Term term) throws IOException { } } + public static class MockMetricAggInitScript extends ScriptedMetricAggContexts.InitScript { + private final Function, Object> script; + + MockMetricAggInitScript(Map params, Object state, + Function, Object> script) { + super(params, state); + this.script = script; + } + + public void execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + script.apply(map); + } + } + + public static class MockMetricAggMapScript implements ScriptedMetricAggContexts.MapScript.LeafFactory { + private final Map params; + private final Object state; + private final SearchLookup lookup; + private final Function, Object> script; + + MockMetricAggMapScript(Map params, Object state, SearchLookup lookup, + Function, Object> script) { + this.params = params; + this.state = state; + this.lookup = lookup; + this.script = script; + } + + @Override + public ScriptedMetricAggContexts.MapScript newInstance(LeafReaderContext context) { + return new ScriptedMetricAggContexts.MapScript(params, state, lookup, context) { + @Override + public void execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + map.put("doc", getDoc()); + map.put("_score", get_score()); + + script.apply(map); + } + }; + } + } + + public static class MockMetricAggCombineScript extends ScriptedMetricAggContexts.CombineScript { + private final Function, Object> script; + + MockMetricAggCombineScript(Map params, Object state, + Function, Object> script) { + super(params, state); + this.script = script; + } + + public Object execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + return script.apply(map); + } + } + + public static class MockMetricAggReduceScript extends ScriptedMetricAggContexts.ReduceScript { + private final Function, Object> script; + + MockMetricAggReduceScript(Map params, List states, + Function, Object> script) { + super(params, states); + this.script = script; + } + + public Object execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("states", getStates()); + return script.apply(map); + } + } + public static Script mockInlineScript(final String script) { return new Script(ScriptType.INLINE, "mock", script, emptyMap()); } @@ -343,15 +475,15 @@ public double execute(Map params, double[] values) { return MovingFunctions.unweightedAvg(values); } } - + public class MockScoreScript implements ScoreScript.Factory { - + private final Function, Object> scripts; - + MockScoreScript(Function, Object> scripts) { 
this.scripts = scripts; } - + @Override public ScoreScript.LeafFactory newFactory(Map params, SearchLookup lookup) { return new ScoreScript.LeafFactory() { @@ -359,7 +491,7 @@ public ScoreScript.LeafFactory newFactory(Map params, SearchLook public boolean needs_score() { return true; } - + @Override public ScoreScript newInstance(LeafReaderContext ctx) throws IOException { Scorer[] scorerHolder = new Scorer[1]; @@ -373,7 +505,7 @@ public double execute() { } return ((Number) scripts.apply(vars)).doubleValue(); } - + @Override public void setScorer(Scorer scorer) { scorerHolder[0] = scorer; From 86ab3a2d1a3bc5f4478d25cd28906915c2ec03fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 25 Jun 2018 15:59:03 +0200 Subject: [PATCH 92/92] Reduce number of raw types warnings (#31523) A first attempt to reduce the number of raw type warnings, most of the time by using the unbounded wildcard. --- .../search/NoopSearchRequestBuilder.java | 4 +- .../ingest/common/AppendProcessorTests.java | 6 +-- .../java/org/elasticsearch/action/Action.java | 2 +- .../action/bulk/BulkItemRequest.java | 6 +-- .../action/bulk/BulkProcessor.java | 10 ++--- .../action/bulk/BulkRequest.java | 22 +++++------ .../action/bulk/TransportBulkAction.java | 28 +++++++------- .../TransportBroadcastReplicationAction.java | 1 + .../common/component/AbstractComponent.java | 2 +- .../component/AbstractLifecycleComponent.java | 2 +- .../builders/GeometryCollectionBuilder.java | 11 +++--- .../geo/builders/MultiPolygonBuilder.java | 11 +++--- .../common/inject/AbstractProcessor.java | 2 +- .../elasticsearch/common/inject/Binder.java | 2 +- .../common/inject/BindingProcessor.java | 4 +- .../common/inject/spi/ElementVisitor.java | 2 +- .../settings/AbstractScopedSettings.java | 4 +- .../common/util/BigObjectArray.java | 2 +- .../plain/AtomicDoubleFieldData.java | 4 +- .../index/mapper/BinaryFieldMapper.java | 3 +- .../index/mapper/BooleanFieldMapper.java | 3 +- .../index/query/BaseTermQueryBuilder.java | 2 +- .../ingest/PipelineExecutionService.java | 4 +- .../elasticsearch/plugins/ActionPlugin.java | 4 +- .../highlight/AbstractHighlighterBuilder.java | 2 +- .../completion/context/ContextBuilder.java | 2 +- .../completion/context/ContextMapping.java | 2 +- .../completion/context/ContextMappings.java | 28 +++++++------- .../CollapsingTopDocsCollectorTests.java | 28 +++++++------- .../org/elasticsearch/action/ActionTests.java | 2 +- .../action/bulk/BulkRequestTests.java | 4 +- .../bulk/TransportBulkActionIngestTests.java | 4 +- .../BroadcastReplicationTests.java | 13 ++++--- .../AbstractTermVectorsTestCase.java | 4 +- .../client/AbstractClientHeadersTestCase.java | 6 +-- .../common/geo/BaseGeoParsingTestCase.java | 6 +-- .../AbstractShapeBuilderTestCase.java | 4 +- .../common/xcontent/BaseXContentTestCase.java | 10 ++--- .../AbstractFieldDataImplTestCase.java | 15 ++++---- .../fielddata/AbstractFieldDataTestCase.java | 2 +- .../AbstractStringFieldDataTestCase.java | 4 +- .../AggregationTestScriptsPlugin.java | 13 +++---- .../aggregations/AggregationsTests.java | 18 ++++----- .../aggregations/metrics/CardinalityIT.java | 8 ++-- .../suggest/CompletionSuggestSearchIT.java | 8 ++-- .../ContextCompletionSuggestSearchIT.java | 37 ++++++++++--------- .../CompletionSuggesterBuilderTests.java | 2 +- .../hamcrest/ElasticsearchGeoAssertions.java | 30 +++++++-------- .../bootstrap/BootstrapForTesting.java | 3 +- .../AbstractSimpleTransportTestCase.java | 8 ++-- .../xpack/core/XPackClientPlugin.java | 4 +- 
.../elasticsearch/xpack/core/XPackPlugin.java | 6 +-- .../core/watcher/actions/ActionFactory.java | 3 +- .../core/watcher/actions/ActionWrapper.java | 16 ++++---- .../aggregation/AggregationTestUtils.java | 4 +- .../integration/BasicDistributedJobsIT.java | 2 +- .../BatchedDocumentsIteratorTests.java | 2 +- .../AutodetectCommunicatorTests.java | 4 +- .../AutodetectProcessManagerTests.java | 6 +-- .../security/authz/AuthorizationService.java | 2 +- .../security/authz/AuthorizationUtils.java | 4 +- .../security/audit/index/AuditTrailTests.java | 4 +- .../authc/AuthenticationServiceTests.java | 19 ++++++---- .../authz/AuthorizationServiceTests.java | 4 +- .../xpack/watcher/input/InputRegistry.java | 6 +-- .../watcher/actions/ActionWrapperTests.java | 5 ++- .../throttler/ActionThrottleTests.java | 14 +++---- 67 files changed, 261 insertions(+), 248 deletions(-) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java index e73edb143e0d0..e66ef6208a6cf 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java @@ -329,7 +329,7 @@ public NoopSearchRequestBuilder addSort(String field, SortOrder order) { * * @see org.elasticsearch.search.sort.SortBuilders */ - public NoopSearchRequestBuilder addSort(SortBuilder sort) { + public NoopSearchRequestBuilder addSort(SortBuilder sort) { sourceBuilder().sort(sort); return this; } @@ -415,7 +415,7 @@ public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer) { * @param window rescore window * @return this for chaining */ - public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer, int window) { + public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer, int window) { sourceBuilder().clearRescorers(); return addRescorer(rescorer.windowSize(window)); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java index 2042bb745bc1b..7a48c9ace326d 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java @@ -91,7 +91,7 @@ public void testAppendValuesToNonExistingList() throws Exception { appendProcessor = createAppendProcessor(field, values); } appendProcessor.execute(ingestDocument); - List list = ingestDocument.getFieldValue(field, List.class); + List list = ingestDocument.getFieldValue(field, List.class); assertThat(list, not(sameInstance(values))); assertThat(list, equalTo(values)); } @@ -115,7 +115,7 @@ public void testConvertScalarToList() throws Exception { appendProcessor = createAppendProcessor(field, values); } appendProcessor.execute(ingestDocument); - List fieldValue = ingestDocument.getFieldValue(field, List.class); + List fieldValue = ingestDocument.getFieldValue(field, List.class); assertThat(fieldValue.size(), equalTo(values.size() + 1)); assertThat(fieldValue.get(0), equalTo(initialValue)); for (int i = 1; i < values.size() + 1; i++) { @@ -144,7 +144,7 @@ public void testAppendMetadataExceptVersion() 
throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Object initialValue = ingestDocument.getSourceAndMetadata().get(randomMetaData.getFieldName()); appendProcessor.execute(ingestDocument); - List list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class); + List list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class); if (initialValue == null) { assertThat(list, equalTo(values)); } else { diff --git a/server/src/main/java/org/elasticsearch/action/Action.java b/server/src/main/java/org/elasticsearch/action/Action.java index 2fc49d69ed1cc..771762ad15c30 100644 --- a/server/src/main/java/org/elasticsearch/action/Action.java +++ b/server/src/main/java/org/elasticsearch/action/Action.java @@ -57,7 +57,7 @@ public TransportRequestOptions transportOptions(Settings settings) { @Override public boolean equals(Object o) { - return o instanceof Action && name.equals(((Action) o).name()); + return o instanceof Action && name.equals(((Action) o).name()); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java index 3180f57d20409..434f87de121ed 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java @@ -31,7 +31,7 @@ public class BulkItemRequest implements Streamable { private int id; - private DocWriteRequest request; + private DocWriteRequest request; private volatile BulkItemResponse primaryResponse; BulkItemRequest() { @@ -39,7 +39,7 @@ public class BulkItemRequest implements Streamable { } // NOTE: public for testing only - public BulkItemRequest(int id, DocWriteRequest request) { + public BulkItemRequest(int id, DocWriteRequest request) { this.id = id; this.request = request; } @@ -48,7 +48,7 @@ public int id() { return id; } - public DocWriteRequest request() { + public DocWriteRequest request() { return request; } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 9febbd63962ee..f8f9d154b14d6 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -248,24 +248,24 @@ public synchronized boolean awaitClose(long timeout, TimeUnit unit) throws Inter * (for example, if no id is provided, one will be generated, or usage of the create flag). */ public BulkProcessor add(IndexRequest request) { - return add((DocWriteRequest) request); + return add((DocWriteRequest) request); } /** * Adds an {@link DeleteRequest} to the list of actions to execute. */ public BulkProcessor add(DeleteRequest request) { - return add((DocWriteRequest) request); + return add((DocWriteRequest) request); } /** * Adds either a delete or an index request. 
*/ - public BulkProcessor add(DocWriteRequest request) { + public BulkProcessor add(DocWriteRequest request) { return add(request, null); } - public BulkProcessor add(DocWriteRequest request, @Nullable Object payload) { + public BulkProcessor add(DocWriteRequest request, @Nullable Object payload) { internalAdd(request, payload); return this; } @@ -280,7 +280,7 @@ protected void ensureOpen() { } } - private synchronized void internalAdd(DocWriteRequest request, @Nullable Object payload) { + private synchronized void internalAdd(DocWriteRequest request, @Nullable Object payload) { ensureOpen(); bulkRequest.add(request, payload); executeIfNeeded(); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index ca5d997dc3882..989172b711a13 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -83,7 +83,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques * {@link WriteRequest}s to this but java doesn't support syntax to declare that everything in the array has both types so we declare * the one with the least casts. */ - final List requests = new ArrayList<>(); + final List> requests = new ArrayList<>(); private final Set indices = new HashSet<>(); List payloads = null; @@ -99,14 +99,14 @@ public BulkRequest() { /** * Adds a list of requests to be executed. Either index or delete requests. */ - public BulkRequest add(DocWriteRequest... requests) { - for (DocWriteRequest request : requests) { + public BulkRequest add(DocWriteRequest... requests) { + for (DocWriteRequest request : requests) { add(request, null); } return this; } - public BulkRequest add(DocWriteRequest request) { + public BulkRequest add(DocWriteRequest request) { return add(request, null); } @@ -116,7 +116,7 @@ public BulkRequest add(DocWriteRequest request) { * @param payload Optional payload * @return the current bulk request */ - public BulkRequest add(DocWriteRequest request, @Nullable Object payload) { + public BulkRequest add(DocWriteRequest request, @Nullable Object payload) { if (request instanceof IndexRequest) { add((IndexRequest) request, payload); } else if (request instanceof DeleteRequest) { @@ -133,8 +133,8 @@ public BulkRequest add(DocWriteRequest request, @Nullable Object payload) { /** * Adds a list of requests to be executed. Either index or delete requests. */ - public BulkRequest add(Iterable requests) { - for (DocWriteRequest request : requests) { + public BulkRequest add(Iterable> requests) { + for (DocWriteRequest request : requests) { add(request); } return this; @@ -223,7 +223,7 @@ private void addPayload(Object payload) { /** * The list of requests in this bulk request. */ - public List requests() { + public List> requests() { return this.requests; } @@ -527,7 +527,7 @@ private int findNextMarker(byte marker, int from, BytesReference data, int lengt * @return Whether this bulk request contains index request with an ingest pipeline enabled. 
*/ public boolean hasIndexRequestsWithPipelines() { - for (DocWriteRequest actionRequest : requests) { + for (DocWriteRequest actionRequest : requests) { if (actionRequest instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) actionRequest; if (Strings.hasText(indexRequest.getPipeline())) { @@ -545,7 +545,7 @@ public ActionRequestValidationException validate() { if (requests.isEmpty()) { validationException = addValidationError("no requests added", validationException); } - for (DocWriteRequest request : requests) { + for (DocWriteRequest request : requests) { // We first check if refresh has been set if (((WriteRequest) request).getRefreshPolicy() != RefreshPolicy.NONE) { validationException = addValidationError( @@ -580,7 +580,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); waitForActiveShards.writeTo(out); out.writeVInt(requests.size()); - for (DocWriteRequest request : requests) { + for (DocWriteRequest request : requests) { DocWriteRequest.writeDocumentRequest(out, request); } refreshPolicy.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 247970dafcee3..a6ed8de653007 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -143,8 +143,8 @@ protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener indices = bulkRequest.requests.stream() // delete requests should not attempt to create the index (if the index does not // exists), unless an external versioning is used - .filter(request -> request.opType() != DocWriteRequest.OpType.DELETE - || request.versionType() == VersionType.EXTERNAL + .filter(request -> request.opType() != DocWriteRequest.OpType.DELETE + || request.versionType() == VersionType.EXTERNAL || request.versionType() == VersionType.EXTERNAL_GTE) .map(DocWriteRequest::index) .collect(Collectors.toSet()); @@ -184,7 +184,7 @@ public void onFailure(Exception e) { if (!(ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException)) { // fail all requests involving this index, if create didn't work for (int i = 0; i < bulkRequest.requests.size(); i++) { - DocWriteRequest request = bulkRequest.requests.get(i); + DocWriteRequest request = bulkRequest.requests.get(i); if (request != null && setResponseFailureIfIndexMatches(responses, i, request, index, e)) { bulkRequest.requests.set(i, null); } @@ -221,7 +221,7 @@ void createIndex(String index, TimeValue timeout, ActionListener responses, int idx, DocWriteRequest request, String index, Exception e) { + private boolean setResponseFailureIfIndexMatches(AtomicArray responses, int idx, DocWriteRequest request, String index, Exception e) { if (index.equals(request.index())) { responses.set(idx, new BulkItemResponse(idx, request.opType(), new BulkItemResponse.Failure(request.index(), request.type(), request.id(), e))); return true; @@ -271,7 +271,7 @@ protected void doRun() throws Exception { final ConcreteIndices concreteIndices = new ConcreteIndices(clusterState, indexNameExpressionResolver); MetaData metaData = clusterState.metaData(); for (int i = 0; i < bulkRequest.requests.size(); i++) { - DocWriteRequest docWriteRequest = bulkRequest.requests.get(i); + DocWriteRequest docWriteRequest = bulkRequest.requests.get(i); //the request can only be null because we set it to null in the previous step, so it 
gets ignored if (docWriteRequest == null) { continue; @@ -315,7 +315,7 @@ protected void doRun() throws Exception { // first, go over all the requests and create a ShardId -> Operations mapping Map> requestsByShard = new HashMap<>(); for (int i = 0; i < bulkRequest.requests.size(); i++) { - DocWriteRequest request = bulkRequest.requests.get(i); + DocWriteRequest request = bulkRequest.requests.get(i); if (request == null) { continue; } @@ -362,7 +362,7 @@ public void onFailure(Exception e) { // create failures for all relevant requests for (BulkItemRequest request : requests) { final String indexName = concreteIndices.getConcreteIndex(request.index()).getName(); - DocWriteRequest docWriteRequest = request.request(); + DocWriteRequest docWriteRequest = request.request(); responses.set(request.id(), new BulkItemResponse(request.id(), docWriteRequest.opType(), new BulkItemResponse.Failure(indexName, docWriteRequest.type(), docWriteRequest.id(), e))); } @@ -418,7 +418,7 @@ public void onTimeout(TimeValue timeout) { }); } - private boolean addFailureIfIndexIsUnavailable(DocWriteRequest request, int idx, final ConcreteIndices concreteIndices, + private boolean addFailureIfIndexIsUnavailable(DocWriteRequest request, int idx, final ConcreteIndices concreteIndices, final MetaData metaData) { IndexNotFoundException cannotCreate = indicesThatCannotBeCreated.get(request.index()); if (cannotCreate != null) { @@ -442,7 +442,7 @@ private boolean addFailureIfIndexIsUnavailable(DocWriteRequest request, int idx, return false; } - private void addFailure(DocWriteRequest request, int idx, Exception unavailableException) { + private void addFailure(DocWriteRequest request, int idx, Exception unavailableException) { BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.type(), request.id(), unavailableException); BulkItemResponse bulkItemResponse = new BulkItemResponse(idx, request.opType(), failure); @@ -471,7 +471,7 @@ Index getConcreteIndex(String indexOrAlias) { return indices.get(indexOrAlias); } - Index resolveIfAbsent(DocWriteRequest request) { + Index resolveIfAbsent(DocWriteRequest request) { Index concreteIndex = indices.get(request.index()); if (concreteIndex == null) { concreteIndex = indexNameExpressionResolver.concreteSingleIndex(state, request); @@ -512,7 +512,7 @@ void processBulkIndexIngestRequest(Task task, BulkRequest original, ActionListen }); } - static final class BulkRequestModifier implements Iterator { + static final class BulkRequestModifier implements Iterator> { final BulkRequest bulkRequest; final SparseFixedBitSet failedSlots; @@ -528,7 +528,7 @@ static final class BulkRequestModifier implements Iterator { } @Override - public DocWriteRequest next() { + public DocWriteRequest next() { return bulkRequest.requests().get(++currentSlot); } @@ -547,10 +547,10 @@ BulkRequest getBulkRequest() { modifiedBulkRequest.timeout(bulkRequest.timeout()); int slot = 0; - List requests = bulkRequest.requests(); + List> requests = bulkRequest.requests(); originalSlots = new int[requests.size()]; // oversize, but that's ok for (int i = 0; i < requests.size(); i++) { - DocWriteRequest request = requests.get(i); + DocWriteRequest request = requests.get(i); if (failedSlots.get(i) == false) { modifiedBulkRequest.add(request); originalSlots[slot++] = i; diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java 
b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java
index aa3784efdd04f..1adfdbca8786b 100644
--- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.action.support.replication;
 
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
+
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java
index 8cb51f2b06b0e..62d6e7e311d5d 100644
--- a/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java
+++ b/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java
@@ -39,7 +39,7 @@ public AbstractComponent(Settings settings) {
         this.settings = settings;
     }
 
-    public AbstractComponent(Settings settings, Class customClass) {
+    public AbstractComponent(Settings settings, Class<?> customClass) {
         this.logger = LogManager.getLogger(customClass);
         this.deprecationLogger = new DeprecationLogger(logger);
         this.settings = settings;
diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java
index 2ed43ccaa24e6..de14e0cd53db6 100644
--- a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java
+++ b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java
@@ -35,7 +35,7 @@ protected AbstractLifecycleComponent(Settings settings) {
         super(settings);
     }
 
-    protected AbstractLifecycleComponent(Settings settings, Class customClass) {
+    protected AbstractLifecycleComponent(Settings settings, Class<?> customClass) {
         super(settings, customClass);
     }
 
diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java
index b9c23842a5a8c..d2ff86ea63ced 100644
--- a/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java
+++ b/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java
@@ -19,23 +19,22 @@
 package org.elasticsearch.common.geo.builders;
 
-import org.elasticsearch.common.geo.GeoShapeType;
-import org.elasticsearch.common.geo.parsers.ShapeParser;
-import org.elasticsearch.common.geo.parsers.GeoWKTParser;
-import org.locationtech.spatial4j.shape.Shape;
-
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.geo.GeoShapeType;
 import org.elasticsearch.common.geo.XShapeCollection;
+import org.elasticsearch.common.geo.parsers.GeoWKTParser;
+import org.elasticsearch.common.geo.parsers.ShapeParser;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.locationtech.spatial4j.shape.Shape;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;
 
-public class GeometryCollectionBuilder extends ShapeBuilder {
+public class GeometryCollectionBuilder extends ShapeBuilder<Shape, GeometryCollectionBuilder> {
 
     public static final GeoShapeType TYPE = GeoShapeType.GEOMETRYCOLLECTION;
 
diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java
index 3d917bcff6e48..bac74c29dd805 100644
--- a/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java
+++ b/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java
@@ -20,15 +20,14 @@
 package org.elasticsearch.common.geo.builders;
 
 import org.elasticsearch.common.geo.GeoShapeType;
-import org.elasticsearch.common.geo.parsers.ShapeParser;
-import org.elasticsearch.common.geo.parsers.GeoWKTParser;
-import org.locationtech.spatial4j.shape.Shape;
-import org.locationtech.jts.geom.Coordinate;
-
 import org.elasticsearch.common.geo.XShapeCollection;
+import org.elasticsearch.common.geo.parsers.GeoWKTParser;
+import org.elasticsearch.common.geo.parsers.ShapeParser;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.spatial4j.shape.Shape;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -36,7 +35,7 @@
 import java.util.Locale;
 import java.util.Objects;
 
-public class MultiPolygonBuilder extends ShapeBuilder {
+public class MultiPolygonBuilder extends ShapeBuilder<Shape, MultiPolygonBuilder> {
 
     public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON;
 
diff --git a/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java
index 8b501a561292e..30b799601487f 100644
--- a/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java
+++ b/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java
@@ -85,7 +85,7 @@ public Boolean visit(ScopeBinding scopeBinding) {
     }
 
     @Override
-    public Boolean visit(InjectionRequest injectionRequest) {
+    public Boolean visit(InjectionRequest<?> injectionRequest) {
         return false;
     }
 
diff --git a/server/src/main/java/org/elasticsearch/common/inject/Binder.java b/server/src/main/java/org/elasticsearch/common/inject/Binder.java
index 2a4799cefccb1..03d164bcbaa52 100644
--- a/server/src/main/java/org/elasticsearch/common/inject/Binder.java
+++ b/server/src/main/java/org/elasticsearch/common/inject/Binder.java
@@ -360,7 +360,7 @@ void bindListener(Matcher<? super TypeLiteral<?>> typeMatcher,
      * @return a binder that shares its configuration with this binder.
      * @since 2.0
      */
-    Binder skipSources(Class... classesToSkip);
+    Binder skipSources(Class<?>... classesToSkip);
 
     /**
     * Creates a new private child environment for bindings and other configuration. The returned
diff --git a/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java
index e560eeb1efd63..971b100a6799e 100644
--- a/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java
+++ b/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java
@@ -72,7 +72,7 @@ public <T> Boolean visit(Binding<T> command) {
 
         if (Void.class.equals(command.getKey().getRawType())) {
             if (command instanceof ProviderInstanceBinding
-                    && ((ProviderInstanceBinding) command).getProviderInstance() instanceof ProviderMethod) {
+                    && ((ProviderInstanceBinding<?>) command).getProviderInstance() instanceof ProviderMethod) {
                 errors.voidProviderMethod();
             } else {
                 errors.missingConstantValues();
@@ -274,7 +274,7 @@ private void putBinding(BindingImpl<?> binding) {
      */
     private boolean isOkayDuplicate(Binding<?> original, BindingImpl<?> binding) {
         if (original instanceof ExposedBindingImpl) {
-            ExposedBindingImpl exposed = (ExposedBindingImpl) original;
+            ExposedBindingImpl<?> exposed = (ExposedBindingImpl<?>) original;
             InjectorImpl exposedFrom = (InjectorImpl) exposed.getPrivateElements().getInjector();
             return (exposedFrom == binding.getInjector());
         }
diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java b/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java
index 6711456004380..8440ab98b5cb8 100644
--- a/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java
+++ b/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java
@@ -46,7 +46,7 @@ public interface ElementVisitor<V> {
     /**
     * Visit a request to inject the instance fields and methods of an instance.
     */
-    V visit(InjectionRequest request);
+    V visit(InjectionRequest<?> request);
 
     /**
     * Visit a request to inject the static fields and methods of type.
diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
index eb4e294642417..8847c8138a706 100644
--- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
+++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
@@ -83,7 +83,7 @@ protected AbstractScopedSettings(Settings settings, Set<Setting<?>> settingsSet,
         this.keySettings = Collections.unmodifiableMap(keySettings);
     }
 
-    protected void validateSettingKey(Setting setting) {
+    protected void validateSettingKey(Setting<?> setting) {
         if (isValidKey(setting.getKey()) == false && (setting.isGroupSetting() && isValidGroupKey(setting.getKey())
             || isValidAffixKey(setting.getKey())) == false || setting.getKey().endsWith(".0")) {
             throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]");
@@ -366,7 +366,7 @@ void validate(final String key, final Settings settings, final boolean validateD
      * @throws IllegalArgumentException if the setting is invalid
      */
     void validate(final String key, final Settings settings, final boolean validateDependencies, final boolean validateInternalIndex) {
-        Setting setting = getRaw(key);
+        Setting<?> setting = getRaw(key);
         if (setting == null) {
             LevensteinDistance ld = new LevensteinDistance();
             List<Tuple<Float, String>> scoredKeys = new ArrayList<>();
diff --git a/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java b/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java
index 1ed012e2bb393..77ac63a984f55 100644
--- a/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java
+++ b/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java
@@ -32,7 +32,7 @@
  */
 final class BigObjectArray<T> extends AbstractBigArray implements ObjectArray<T> {
 
-    private static final BigObjectArray ESTIMATOR = new BigObjectArray(0, BigArrays.NON_RECYCLING_INSTANCE);
+    private static final BigObjectArray<Object> ESTIMATOR = new BigObjectArray<>(0, BigArrays.NON_RECYCLING_INSTANCE);
 
     private Object[][] pages;
 
diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java
index 82ae0bb5bf1ea..6fd08b82668f6 100644
--- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java
+++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java
@@ -48,7 +48,7 @@ public long ramBytesUsed() {
     }
 
     @Override
-    public final ScriptDocValues getScriptValues() {
+    public final ScriptDocValues<Double> getScriptValues() {
         return new ScriptDocValues.Doubles(getDoubleValues());
     }
 
@@ -69,7 +69,7 @@ public static AtomicNumericFieldData empty(final int maxDoc) {
             public SortedNumericDoubleValues getDoubleValues() {
                 return FieldData.emptySortedNumericDoubles();
             }
-            
+
             @Override
             public Collection<Accountable> getChildResources() {
                 return Collections.emptyList();
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java
index e19bdb6708370..69b6a6e04a936 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java
@@ -80,7 +80,8 @@ public BinaryFieldMapper build(BuilderContext context) {
 
     public static class TypeParser implements Mapper.TypeParser {
         @Override
-        public Mapper.Builder
parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + public BinaryFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { BinaryFieldMapper.Builder builder = new BinaryFieldMapper.Builder(name); parseField(builder, name, node, parserContext); return builder; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index c50a7d18113bf..cb44e777f871d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -94,7 +94,8 @@ public BooleanFieldMapper build(BuilderContext context) { public static class TypeParser implements Mapper.TypeParser { @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + public BooleanFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { BooleanFieldMapper.Builder builder = new BooleanFieldMapper.Builder(name); parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { diff --git a/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java index a3ef36204f032..f06ee48d06b67 100644 --- a/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java @@ -163,7 +163,7 @@ protected final int doHashCode() { } @Override - protected final boolean doEquals(BaseTermQueryBuilder other) { + protected final boolean doEquals(QB other) { return Objects.equals(fieldName, other.fieldName) && Objects.equals(value, other.value); } diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index f1062f7b5384c..a8aca4fdfe59d 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -53,7 +53,7 @@ public PipelineExecutionService(PipelineStore store, ThreadPool threadPool) { this.threadPool = threadPool; } - public void executeBulkRequest(Iterable actionRequests, + public void executeBulkRequest(Iterable> actionRequests, BiConsumer itemFailureHandler, Consumer completionHandler) { threadPool.executor(ThreadPool.Names.WRITE).execute(new AbstractRunnable() { @@ -65,7 +65,7 @@ public void onFailure(Exception e) { @Override protected void doRun() throws Exception { - for (DocWriteRequest actionRequest : actionRequests) { + for (DocWriteRequest actionRequest : actionRequests) { IndexRequest indexRequest = null; if (actionRequest instanceof IndexRequest) { indexRequest = (IndexRequest) actionRequest; diff --git a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java index eb8b7130d7054..54d9ade581e89 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java @@ -19,9 +19,9 @@ package org.elasticsearch.plugins; +import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import 
org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.TransportActions; @@ -68,7 +68,7 @@ public interface ActionPlugin { * Client actions added by this plugin. This defaults to all of the {@linkplain Action} in * {@linkplain ActionPlugin#getActions()}. */ - default List getClientActions() { + default List> getClientActions() { return getActions().stream().map(a -> a.action).collect(Collectors.toList()); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index e59bd718d3226..7888f6cd5a098 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -115,7 +115,7 @@ public abstract class AbstractHighlighterBuilder template, QueryBuilder queryBuilder) { preTags = template.preTags; postTags = template.postTags; fragmentSize = template.fragmentSize; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java index 9e31d8370cbe3..b6713f81ec48c 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java @@ -22,7 +22,7 @@ /** * Builder for {@link ContextMapping} */ -public abstract class ContextBuilder { +public abstract class ContextBuilder> { protected String name; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java index 7eab4e072f146..1aa82eeb2190a 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java @@ -143,7 +143,7 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ContextMapping that = (ContextMapping) o; + ContextMapping that = (ContextMapping) o; if (type != that.type) return false; return name.equals(that.name); } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java index 4d6b53296f157..961d7fd9f59a7 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java @@ -57,10 +57,10 @@ public class ContextMappings implements ToXContent { private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ContextMappings.class)); - private final List contextMappings; - private final Map contextNameMap; + private final List> contextMappings; + private final Map> contextNameMap; - public ContextMappings(List contextMappings) { + public 
ContextMappings(List> contextMappings) { if (contextMappings.size() > 255) { // we can support more, but max of 255 (1 byte) unique context types per suggest field // seems reasonable? @@ -68,7 +68,7 @@ public ContextMappings(List contextMappings) { } this.contextMappings = contextMappings; contextNameMap = new HashMap<>(contextMappings.size()); - for (ContextMapping mapping : contextMappings) { + for (ContextMapping mapping : contextMappings) { contextNameMap.put(mapping.name(), mapping); } } @@ -84,8 +84,8 @@ public int size() { /** * Returns a context mapping by its name */ - public ContextMapping get(String name) { - ContextMapping contextMapping = contextNameMap.get(name); + public ContextMapping get(String name) { + ContextMapping contextMapping = contextNameMap.get(name); if (contextMapping == null) { List keys = new ArrayList<>(contextNameMap.keySet()); Collections.sort(keys); @@ -138,7 +138,7 @@ protected Iterable contexts() { for (int typeId = 0; typeId < contextMappings.size(); typeId++) { scratch.setCharAt(0, (char) typeId); scratch.setLength(1); - ContextMapping mapping = contextMappings.get(typeId); + ContextMapping mapping = contextMappings.get(typeId); Set contexts = new HashSet<>(mapping.parseContext(document)); if (this.contexts.get(mapping.name()) != null) { contexts.addAll(this.contexts.get(mapping.name())); @@ -173,7 +173,7 @@ public ContextQuery toContextQuery(CompletionQuery query, Map mapping = contextMappings.get(typeId); List internalQueryContext = queryContexts.get(mapping.name()); if (internalQueryContext != null) { for (ContextMapping.InternalQueryContext context : internalQueryContext) { @@ -204,7 +204,7 @@ public Map> getNamedContexts(List contex for (CharSequence typedContext : contexts) { int typeId = typedContext.charAt(0); assert typeId < contextMappings.size() : "Returned context has invalid type"; - ContextMapping mapping = contextMappings.get(typeId); + ContextMapping mapping = contextMappings.get(typeId); Set contextEntries = contextMap.get(mapping.name()); if (contextEntries == null) { contextEntries = new HashSet<>(); @@ -224,10 +224,10 @@ public Map> getNamedContexts(List contex * */ public static ContextMappings load(Object configuration, Version indexVersionCreated) throws ElasticsearchParseException { - final List contextMappings; + final List> contextMappings; if (configuration instanceof List) { contextMappings = new ArrayList<>(); - List configurations = (List)configuration; + List configurations = (List) configuration; for (Object contextConfig : configurations) { contextMappings.add(load((Map) contextConfig, indexVersionCreated)); } @@ -242,10 +242,10 @@ public static ContextMappings load(Object configuration, Version indexVersionCre return new ContextMappings(contextMappings); } - private static ContextMapping load(Map contextConfig, Version indexVersionCreated) { + private static ContextMapping load(Map contextConfig, Version indexVersionCreated) { String name = extractRequiredValue(contextConfig, FIELD_NAME); String type = extractRequiredValue(contextConfig, FIELD_TYPE); - final ContextMapping contextMapping; + final ContextMapping contextMapping; switch (Type.fromString(type)) { case CATEGORY: contextMapping = CategoryContextMapping.load(name, contextConfig); @@ -276,7 +276,7 @@ private static String extractRequiredValue(Map contextConfig, St */ @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - for (ContextMapping contextMapping : contextMappings) { + for (ContextMapping 
contextMapping : contextMappings) { builder.startObject(); contextMapping.toXContent(builder, params); builder.endObject(); diff --git a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java b/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java index 0290a6c5d100b..bce5965e50b6b 100644 --- a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java +++ b/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java @@ -75,7 +75,7 @@ public String toString() { } } - interface CollapsingDocValuesProducer { + interface CollapsingDocValuesProducer> { T randomGroup(int maxGroup); void add(Document doc, T value, boolean multivalued); @@ -83,14 +83,14 @@ interface CollapsingDocValuesProducer { SortField sortField(boolean multivalued); } - void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, boolean numeric) throws IOException { + > void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, boolean numeric) throws IOException { assertSearchCollapse(dvProducers, numeric, true, true); assertSearchCollapse(dvProducers, numeric, true, false); assertSearchCollapse(dvProducers, numeric, false, true); assertSearchCollapse(dvProducers, numeric, false, false); } - private void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, + private > void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, boolean numeric, boolean multivalued, boolean trackMaxScores) throws IOException { final int numDocs = randomIntBetween(1000, 2000); @@ -120,7 +120,7 @@ private void assertSearchCollapse(CollapsingDocValuesProd int expectedNumGroups = values.size(); - final CollapsingTopDocsCollector collapsingCollector; + final CollapsingTopDocsCollector collapsingCollector; if (numeric) { collapsingCollector = CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups, trackMaxScores); @@ -199,7 +199,7 @@ private void assertSearchCollapse(CollapsingDocValuesProd final Weight weight = searcher.createNormalizedWeight(new MatchAllDocsQuery(), true); for (int shardIDX = 0; shardIDX < subSearchers.length; shardIDX++) { final SegmentSearcher subSearcher = subSearchers[shardIDX]; - final CollapsingTopDocsCollector c; + final CollapsingTopDocsCollector c; if (numeric) { c = CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups, trackMaxScores); } else { @@ -221,7 +221,7 @@ private static void assertTopDocsEquals(CollapseTopFieldDocs topDocs1, CollapseT } public void testCollapseLong() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Long randomGroup(int maxGroup) { return randomNonNegativeLong() % maxGroup; @@ -249,7 +249,7 @@ public SortField sortField(boolean multivalued) { } public void testCollapseInt() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Integer randomGroup(int maxGroup) { return randomIntBetween(0, maxGroup - 1); @@ -277,10 +277,10 @@ public SortField sortField(boolean multivalued) { } public void testCollapseFloat() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Float 
randomGroup(int maxGroup) { - return new Float(randomIntBetween(0, maxGroup - 1)); + return Float.valueOf(randomIntBetween(0, maxGroup - 1)); } @Override @@ -305,10 +305,10 @@ public SortField sortField(boolean multivalued) { } public void testCollapseDouble() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Double randomGroup(int maxGroup) { - return new Double(randomIntBetween(0, maxGroup - 1)); + return Double.valueOf(randomIntBetween(0, maxGroup - 1)); } @Override @@ -333,7 +333,7 @@ public SortField sortField(boolean multivalued) { } public void testCollapseString() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public BytesRef randomGroup(int maxGroup) { return new BytesRef(Integer.toString(randomIntBetween(0, maxGroup - 1))); @@ -383,7 +383,7 @@ public void testEmptyNumericSegment() throws Exception { SortField sortField = new SortField("group", SortField.Type.LONG); sortField.setMissingValue(Long.MAX_VALUE); Sort sort = new Sort(sortField); - final CollapsingTopDocsCollector collapsingCollector = + final CollapsingTopDocsCollector collapsingCollector = CollapsingTopDocsCollector.createNumeric("group", sort, 10, false); searcher.search(new MatchAllDocsQuery(), collapsingCollector); CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs(); @@ -419,7 +419,7 @@ public void testEmptySortedSegment() throws Exception { final IndexReader reader = w.getReader(); final IndexSearcher searcher = newSearcher(reader); Sort sort = new Sort(new SortField("group", SortField.Type.STRING_VAL)); - final CollapsingTopDocsCollector collapsingCollector = + final CollapsingTopDocsCollector collapsingCollector = CollapsingTopDocsCollector.createKeyword("group", sort, 10, false); searcher.search(new MatchAllDocsQuery(), collapsingCollector); CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs(); diff --git a/server/src/test/java/org/elasticsearch/action/ActionTests.java b/server/src/test/java/org/elasticsearch/action/ActionTests.java index c159d36ca9158..a7dca3f098d05 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionTests.java @@ -24,7 +24,7 @@ public class ActionTests extends ESTestCase { public void testEquals() { - class FakeAction extends Action { + class FakeAction extends Action { protected FakeAction(String name) { super(name); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 1d03d065e7af7..f1842b5b0dd1d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -124,7 +124,7 @@ public void testBulkAllowExplicitIndex() throws Exception { public void testBulkAddIterable() { BulkRequest bulkRequest = Requests.bulkRequest(); - List requests = new ArrayList<>(); + List> requests = new ArrayList<>(); requests.add(new IndexRequest("test", "test", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value")); requests.add(new UpdateRequest("test", "test", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value")); requests.add(new DeleteRequest("test", "test", "id")); @@ -279,7 +279,7 @@ 
public void testSmileIsSupported() throws IOException { BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(data, null, null, xContentType); assertEquals(1, bulkRequest.requests().size()); - DocWriteRequest docWriteRequest = bulkRequest.requests().get(0); + DocWriteRequest docWriteRequest = bulkRequest.requests().get(0); assertEquals(DocWriteRequest.OpType.INDEX, docWriteRequest.opType()); assertEquals("index", docWriteRequest.index()); assertEquals("type", docWriteRequest.type()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index bcd16386df3d4..66527726573a5 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -84,7 +84,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { @Captor ArgumentCaptor> remoteResponseHandler; @Captor - ArgumentCaptor> bulkDocsItr; + ArgumentCaptor>> bulkDocsItr; /** The actual action we want to test, with real indexing mocked */ TestTransportBulkAction action; @@ -225,7 +225,7 @@ public void testIngestLocal() throws Exception { assertTrue(failureCalled.get()); // now check success - Iterator req = bulkDocsItr.getValue().iterator(); + Iterator> req = bulkDocsItr.getValue().iterator(); failureHandler.getValue().accept((IndexRequest)req.next(), exception); // have an exception for our one index request indexRequest2.setPipeline(null); // this is done by the real pipeline execution service when processing completionHandler.getValue().accept(null); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index bfa45bb072dcf..012cc71437a80 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -18,9 +18,6 @@ */ package org.elasticsearch.action.support.replication; -import org.elasticsearch.action.support.ActionTestUtils; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.UnavailableShardsException; @@ -28,7 +25,9 @@ import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; @@ -41,6 +40,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.breaker.CircuitBreakerService; import 
org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -104,6 +104,7 @@ threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService, new NamedWr new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), null); } + @Override @After public void tearDown() throws Exception { super.tearDown(); @@ -244,13 +245,15 @@ public FlushResponse assertImmediateResponse(String index, TransportFlushAction return flushResponse; } - public BroadcastResponse executeAndAssertImmediateResponse(TransportBroadcastReplicationAction broadcastAction, DummyBroadcastRequest request) { + public BroadcastResponse executeAndAssertImmediateResponse( + TransportBroadcastReplicationAction broadcastAction, + DummyBroadcastRequest request) { PlainActionFuture response = PlainActionFuture.newFuture(); broadcastAction.execute(request, response); return response.actionGet("5s"); } - private void assertBroadcastResponse(int total, int successful, int failed, BroadcastResponse response, Class exceptionClass) { + private void assertBroadcastResponse(int total, int successful, int failed, BroadcastResponse response, Class exceptionClass) { assertThat(response.getSuccessfulShards(), equalTo(successful)); assertThat(response.getTotalShards(), equalTo(total)); assertThat(response.getFailedShards(), equalTo(failed)); diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index bd76557f9a86f..e7606ec071895 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -167,7 +167,7 @@ protected static class TestConfig { public final boolean requestPositions; public final boolean requestOffsets; public final boolean requestPayloads; - public Class expectedException = null; + public Class expectedException = null; public TestConfig(TestDoc doc, String[] selectedFields, boolean requestPositions, boolean requestOffsets, boolean requestPayloads) { this.doc = doc; @@ -177,7 +177,7 @@ public TestConfig(TestDoc doc, String[] selectedFields, boolean requestPositions this.requestPayloads = requestPayloads; } - public TestConfig expectedException(Class exceptionClass) { + public TestConfig expectedException(Class exceptionClass) { this.expectedException = exceptionClass; return this; } diff --git a/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index 5dea451dbacfd..31f6963536c50 100644 --- a/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -20,8 +20,8 @@ package org.elasticsearch.client; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; @@ -56,7 +56,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { .put(ThreadContext.PREFIX + ".key2", "val 2") .build(); - private static final Action[] ACTIONS = new Action[] 
{ + private static final Action[] ACTIONS = new Action[] { // client actions GetAction.INSTANCE, SearchAction.INSTANCE, DeleteAction.INSTANCE, DeleteStoredScriptAction.INSTANCE, IndexAction.INSTANCE, @@ -92,7 +92,7 @@ public void tearDown() throws Exception { terminate(threadPool); } - protected abstract Client buildClient(Settings headersSettings, Action[] testedActions); + protected abstract Client buildClient(Settings headersSettings, Action[] testedActions); public void testActions() { diff --git a/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java b/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java index 023932be6a9d0..420f5c5caefb2 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java @@ -18,13 +18,13 @@ */ package org.elasticsearch.common.geo; -import org.locationtech.jts.geom.Geometry; -import org.locationtech.jts.geom.GeometryFactory; import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions; +import org.locationtech.jts.geom.Geometry; +import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.spatial4j.shape.Shape; import org.locationtech.spatial4j.shape.ShapeCollection; import org.locationtech.spatial4j.shape.jts.JtsGeometry; @@ -49,7 +49,7 @@ abstract class BaseGeoParsingTestCase extends ESTestCase { public abstract void testParseEnvelope() throws IOException; public abstract void testParseGeometryCollection() throws IOException; - protected void assertValidException(XContentBuilder builder, Class expectedException) throws IOException { + protected void assertValidException(XContentBuilder builder, Class expectedException) throws IOException { try (XContentParser parser = createParser(builder)) { parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, expectedException); diff --git a/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 5f2c721533de9..20e159ded41e4 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -37,7 +37,7 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; -public abstract class AbstractShapeBuilderTestCase extends ESTestCase { +public abstract class AbstractShapeBuilderTestCase> extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; private static NamedWriteableRegistry namedWriteableRegistry; @@ -81,7 +81,7 @@ public void testFromXContent() throws IOException { XContentBuilder shuffled = shuffleXContent(builder); try (XContentParser shapeContentParser = createParser(shuffled)) { shapeContentParser.nextToken(); - ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser); + ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser); assertNotSame(testShape, parsedShape); assertEquals(testShape, parsedShape); assertEquals(testShape.hashCode(), parsedShape.hashCode()); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java 
b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 690e7567e59ff..0efeae29c3cce 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -629,7 +629,7 @@ public void testToXContent() throws Exception { public void testMap() throws Exception { Map> maps = new HashMap<>(); - maps.put("{'map':null}", (Map) null); + maps.put("{'map':null}", (Map) null); maps.put("{'map':{}}", Collections.emptyMap()); maps.put("{'map':{'key':'value'}}", singletonMap("key", "value")); @@ -654,7 +654,7 @@ public void testMap() throws Exception { public void testIterable() throws Exception { Map> iterables = new HashMap<>(); - iterables.put("{'iter':null}", (Iterable) null); + iterables.put("{'iter':null}", (Iterable) null); iterables.put("{'iter':[]}", Collections.emptyList()); iterables.put("{'iter':['a','b']}", Arrays.asList("a", "b")); @@ -944,7 +944,7 @@ public void testSelfReferencingIterable() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder() .startObject() - .field("field", (Iterable) values) + .field("field", values) .endObject()); assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself")); } @@ -959,7 +959,7 @@ public void testSelfReferencingIterableOneLevel() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder() .startObject() - .field("field", (Iterable) values) + .field("field", values) .endObject()); assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself")); } @@ -972,7 +972,7 @@ public void testSelfReferencingIterableTwoLevels() throws IOException { List it1 = new ArrayList<>(); map0.put("foo", 0); - map0.put("it1", (Iterable) it1); // map 0 -> it1 + map0.put("it1", it1); // map 0 -> it1 it1.add(map1); it1.add(map2); // it 1 -> map 1, map 2 diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index df6328feabc86..cd1dc01d9ef4a 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.List; + import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -72,7 +73,7 @@ protected long minRamBytesUsed() { public void testDeletedDocs() throws Exception { add2SingleValuedDocumentsAndDeleteOneOfThem(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -86,7 +87,7 @@ public void testDeletedDocs() throws Exception { public void testSingleValueAllSet() throws Exception { fillSingleValueAllSet(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -156,7 +157,7 @@ public void assertValues(SortedBinaryDocValues values, int docId, String... 
actu public void testSingleValueWithMissing() throws Exception { fillSingleValueWithMissing(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -177,7 +178,7 @@ public void testMultiValueAllSet() throws Exception { // the segments are force merged to a single segment so that the sorted binary doc values can be asserted within a single segment. // Previously we used the SlowCompositeReaderWrapper but this is an unideal solution so force merging is a better idea. writer.forceMerge(1); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -211,7 +212,7 @@ public void testMultiValueAllSet() throws Exception { public void testMultiValueWithMissing() throws Exception { fillMultiValueWithMissing(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -227,7 +228,7 @@ public void testMultiValueWithMissing() throws Exception { public void testMissingValueForAll() throws Exception { fillAllMissing(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -251,7 +252,7 @@ public void testMissingValueForAll() throws Exception { public void testSortMultiValuesFields() throws Exception { fillExtendedMvSet(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index c204690c76e07..ee8f18aa11e6b 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -170,7 +170,7 @@ public void testEmpty() throws Exception { writer.addDocument(d); refreshReader(); - IndexFieldData fieldData = getForField("non_existing_field"); + IndexFieldData fieldData = getForField("non_existing_field"); int max = randomInt(7); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData previous = null; diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index a478d2c37426d..04cd13766176b 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -260,7 +260,7 @@ public void testActualMissingValue(boolean reverse) throws IOException { } } - final IndexFieldData indexFieldData = 
getForField("value"); + final IndexFieldData indexFieldData = getForField("value"); final String missingValue = values[1]; IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(missingValue, MultiValueMode.MIN, null, reverse); @@ -315,7 +315,7 @@ public void testSortMissing(boolean first, boolean reverse) throws IOException { writer.commit(); } } - final IndexFieldData indexFieldData = getForField("value"); + final IndexFieldData indexFieldData = getForField("value"); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(first ? "_first" : "_last", MultiValueMode.MIN, null, reverse); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java index bc98dda41d661..38d9e62604c46 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import java.util.HashMap; import java.util.Map; @@ -29,8 +30,6 @@ import static java.util.Collections.singletonMap; -import org.elasticsearch.script.ScriptType; - /** * This class contains various mocked scripts that are used in aggregations integration tests. 
*/ @@ -68,32 +67,32 @@ protected Map, Object>> pluginScripts() { }); scripts.put("doc['value'].value", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("value"); }); scripts.put("doc['value'].value - dec", vars -> { int dec = (int) vars.get("dec"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value"); return value.getValue() - dec; }); scripts.put("doc['value'].value + inc", vars -> { int inc = (int) vars.get("inc"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value"); return value.getValue() + inc; }); scripts.put("doc['values'].values", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("values"); }); scripts.put(DECREMENT_ALL_VALUES.getIdOrCode(), vars -> { int dec = (int) vars.get("dec"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs values = (ScriptDocValues.Longs) doc.get("values"); double[] res = new double[values.size()]; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index 29d8e327d5cd7..79984f5894904 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -99,10 +99,10 @@ */ public class AggregationsTests extends ESTestCase { - private static final List aggsTests = getAggsTests(); + private static final List> aggsTests = getAggsTests(); - private static List getAggsTests() { - List aggsTests = new ArrayList<>(); + private static List> getAggsTests() { + List> aggsTests = new ArrayList<>(); aggsTests.add(new InternalCardinalityTests()); aggsTests.add(new InternalTDigestPercentilesTests()); aggsTests.add(new InternalTDigestPercentilesRanksTests()); @@ -156,11 +156,11 @@ protected NamedXContentRegistry xContentRegistry() { @Before public void init() throws Exception { - for (InternalAggregationTestCase aggsTest : aggsTests) { + for (InternalAggregationTestCase aggsTest : aggsTests) { if (aggsTest instanceof InternalMultiBucketAggregationTestCase) { // Lower down the number of buckets generated by multi bucket aggregation tests in // order to avoid too many aggregations to be created. 
-            ((InternalMultiBucketAggregationTestCase) aggsTest).setMaxNumberOfBuckets(3);
+            ((InternalMultiBucketAggregationTestCase) aggsTest).setMaxNumberOfBuckets(3);
         }
         aggsTest.setUp();
     }
@@ -168,7 +168,7 @@ public void init() throws Exception {
     @After
     public void cleanUp() throws Exception {
-        for (InternalAggregationTestCase aggsTest : aggsTests) {
+        for (InternalAggregationTestCase aggsTest : aggsTests) {
             aggsTest.tearDown();
         }
     }
@@ -268,9 +268,9 @@ private static InternalAggregations createTestInstance(final int minNumAggs, fin
         int numAggs = randomIntBetween(minNumAggs, 4);
         List aggs = new ArrayList<>(numAggs);
         for (int i = 0; i < numAggs; i++) {
-            InternalAggregationTestCase testCase = randomFrom(aggsTests);
+            InternalAggregationTestCase testCase = randomFrom(aggsTests);
             if (testCase instanceof InternalMultiBucketAggregationTestCase) {
-                InternalMultiBucketAggregationTestCase multiBucketAggTestCase = (InternalMultiBucketAggregationTestCase) testCase;
+                InternalMultiBucketAggregationTestCase multiBucketAggTestCase = (InternalMultiBucketAggregationTestCase) testCase;
                 if (currentDepth < maxDepth) {
                     multiBucketAggTestCase.setSubAggregationsSupplier(
                         () -> createTestInstance(0, currentDepth + 1, maxDepth)
@@ -281,7 +281,7 @@ private static InternalAggregations createTestInstance(final int minNumAggs, fin
                     );
                 }
             } else if (testCase instanceof InternalSingleBucketAggregationTestCase) {
-                InternalSingleBucketAggregationTestCase singleBucketAggTestCase = (InternalSingleBucketAggregationTestCase) testCase;
+                InternalSingleBucketAggregationTestCase singleBucketAggTestCase = (InternalSingleBucketAggregationTestCase) testCase;
                 if (currentDepth < maxDepth) {
                     singleBucketAggTestCase.subAggregationsSupplier = () -> createTestInstance(0, currentDepth + 1, maxDepth);
                 } else {
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java
index b8b33b97e4d00..c770bef7df613 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java
@@ -69,23 +69,23 @@ protected Map, Object>> pluginScripts() {
         scripts.put("_value", vars -> vars.get("_value"));
         scripts.put("doc['str_value'].value", vars -> {
-            Map doc = (Map) vars.get("doc");
+            Map doc = (Map) vars.get("doc");
             return doc.get("str_value");
         });
         scripts.put("doc['str_values'].values", vars -> {
-            Map doc = (Map) vars.get("doc");
+            Map doc = (Map) vars.get("doc");
             ScriptDocValues.Strings strValue = (ScriptDocValues.Strings) doc.get("str_values");
             return strValue.getValues();
         });
         scripts.put("doc[' + singleNumericField() + '].value", vars -> {
-            Map doc = (Map) vars.get("doc");
+            Map doc =(Map) vars.get("doc");
             return doc.get(singleNumericField());
         });
         scripts.put("doc[' + multiNumericField(false) + '].values", vars -> {
-            Map doc = (Map) vars.get("doc");
+            Map doc =(Map) vars.get("doc");
             return ((ScriptDocValues) doc.get(multiNumericField(false))).getValues();
         });
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
index 0717e1be2121e..a3fff7f9d5bcc 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
@@ -890,7 +890,7 @@ public void testSkipDuplicates() throws Exception {
         assertSuggestions(searchResponse, true, "suggestions", expected);
     }
-    public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... suggestions) {
+    public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... suggestions) {
         SearchResponse searchResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion(suggestionName, suggestBuilder)).execute().actionGet();
         assertSuggestions(searchResponse, suggestionName, suggestions);
     }
@@ -971,7 +971,7 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi
         if (completionMappingBuilder.contextMappings != null) {
             mapping = mapping.startArray("contexts");
-            for (Map.Entry contextMapping : completionMappingBuilder.contextMappings.entrySet()) {
+            for (Map.Entry> contextMapping : completionMappingBuilder.contextMappings.entrySet()) {
                 mapping = mapping.startObject()
                     .field("name", contextMapping.getValue().name())
                     .field("type", contextMapping.getValue().type().name());
@@ -1189,7 +1189,7 @@ static class CompletionMappingBuilder {
         String indexAnalyzer = "simple";
         Boolean preserveSeparators = random().nextBoolean();
         Boolean preservePositionIncrements = random().nextBoolean();
-        LinkedHashMap contextMappings = null;
+        LinkedHashMap> contextMappings = null;
         public CompletionMappingBuilder searchAnalyzer(String searchAnalyzer) {
             this.searchAnalyzer = searchAnalyzer;
@@ -1208,7 +1208,7 @@ public CompletionMappingBuilder preservePositionIncrements(Boolean preservePosit
             return this;
         }
-        public CompletionMappingBuilder context(LinkedHashMap contextMappings) {
+        public CompletionMappingBuilder context(LinkedHashMap> contextMappings) {
             this.contextMappings = contextMappings;
             return this;
         }
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java
index 13f7e55277cc4..00defee8daaf4 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.search.suggest;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
+
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.index.IndexResponse;
@@ -68,7 +69,7 @@ protected int numberOfReplicas() {
     }
     public void testContextPrefix() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("cat", ContextBuilder.category("cat").field("cat").build());
         boolean addAnotherContext = randomBoolean();
         if (addAnotherContext) {
@@ -99,7 +100,7 @@ public void testContextPrefix() throws Exception {
     }
     public void testContextRegex() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("cat", ContextBuilder.category("cat").field("cat").build());
         boolean addAnotherContext = randomBoolean();
         if (addAnotherContext) {
@@ -130,7 +131,7 @@ public void testContextRegex() throws Exception {
     }
     public void testContextFuzzy() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("cat", ContextBuilder.category("cat").field("cat").build());
         boolean addAnotherContext = randomBoolean();
         if (addAnotherContext) {
@@ -162,7 +163,7 @@ public void testContextFuzzy() throws Exception {
     public void testContextFilteringWorksWithUTF8Categories() throws Exception {
         CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
-        LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
+        LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
         createIndexAndMapping(mapping);
         IndexResponse indexResponse = client().prepareIndex(INDEX, TYPE, "1")
@@ -183,7 +184,7 @@ public void testContextFilteringWorksWithUTF8Categories() throws Exception {
     public void testSingleContextFiltering() throws Exception {
         CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
-        LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
+        LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
         createIndexAndMapping(mapping);
         int numDocs = 10;
@@ -209,7 +210,7 @@ public void testSingleContextFiltering() throws Exception {
     public void testSingleContextBoosting() throws Exception {
         CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
-        LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
+        LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
         createIndexAndMapping(mapping);
         int numDocs = 10;
@@ -237,7 +238,7 @@ public void testSingleContextBoosting() throws Exception {
     public void testSingleContextMultipleContexts() throws Exception {
         CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
-        LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
+        LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
         createIndexAndMapping(mapping);
         int numDocs = 10;
@@ -262,7 +263,7 @@ public void testSingleContextMultipleContexts() throws Exception {
     }
     public void testMultiContextFiltering() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("cat", ContextBuilder.category("cat").field("cat").build());
         map.put("type", ContextBuilder.category("type").field("type").build());
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
@@ -306,7 +307,7 @@ public void testMultiContextFiltering() throws Exception {
     @AwaitsFix(bugUrl = "multiple context boosting is broken, as a suggestion, contexts pair is treated as (num(context) entries)")
     public void testMultiContextBoosting() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("cat", ContextBuilder.category("cat").field("cat").build());
         map.put("type", ContextBuilder.category("type").field("type").build());
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
@@ -361,7 +362,7 @@ public void testMultiContextBoosting() throws Exception {
     }
     public void testMissingContextValue() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("cat", ContextBuilder.category("cat").field("cat").build());
         map.put("type", ContextBuilder.category("type").field("type").build());
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
@@ -391,7 +392,7 @@ public void testMissingContextValue() throws Exception {
     }
     public void testSeveralContexts() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         final int numContexts = randomIntBetween(2, 5);
         for (int i = 0; i < numContexts; i++) {
             map.put("type" + i, ContextBuilder.category("type" + i).field("type" + i).build());
@@ -421,7 +422,7 @@ public void testSeveralContexts() throws Exception {
     }
     public void testSimpleGeoPrefix() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("geo", ContextBuilder.geo("geo").build());
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
         createIndexAndMapping(mapping);
@@ -446,7 +447,7 @@ public void testSimpleGeoPrefix() throws Exception {
     }
     public void testGeoFiltering() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("geo", ContextBuilder.geo("geo").build());
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
         createIndexAndMapping(mapping);
@@ -478,7 +479,7 @@ public void testGeoFiltering() throws Exception {
     }
     public void testGeoBoosting() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("geo", ContextBuilder.geo("geo").build());
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
         createIndexAndMapping(mapping);
@@ -511,7 +512,7 @@ public void testGeoBoosting() throws Exception {
     }
     public void testGeoPointContext() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("geo", ContextBuilder.geo("geo").build());
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
         createIndexAndMapping(mapping);
@@ -551,7 +552,7 @@ public void testGeoNeighbours() throws Exception {
         neighbours.add("gcpu");
         neighbours.add("u10h");
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("geo", ContextBuilder.geo("geo").precision(4).build());
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
         createIndexAndMapping(mapping);
@@ -640,7 +641,7 @@ public void testGeoField() throws Exception {
     }
     public void testSkipDuplicatesWithContexts() throws Exception {
-        LinkedHashMap map = new LinkedHashMap<>();
+        LinkedHashMap> map = new LinkedHashMap<>();
         map.put("type", ContextBuilder.category("type").field("type").build());
         map.put("cat", ContextBuilder.category("cat").field("cat").build());
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
@@ -706,7 +707,7 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi
         List categoryContextFields = new ArrayList<>();
         if (completionMappingBuilder.contextMappings != null) {
             mapping.startArray("contexts");
-            for (Map.Entry contextMapping : completionMappingBuilder.contextMappings.entrySet()) {
+            for (Map.Entry> contextMapping : completionMappingBuilder.contextMappings.entrySet()) {
                 mapping.startObject()
                     .field("name", contextMapping.getValue().name())
                     .field("type", contextMapping.getValue().type().name());
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java
index 862916890e1bb..37fdb7e0aa08b 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java
@@ -48,7 +48,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe
     private static final Map> contextMap = new HashMap<>();
     private static String categoryContextName;
     private static String geoQueryContextName;
-    private static List contextMappings = new ArrayList<>();
+    private static List> contextMappings = new ArrayList<>();
     @Override
     protected CompletionSuggestionBuilder randomSuggestionBuilder() {
diff --git a/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java b/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java
index 7213d7bf9802f..5842b179078d0 100644
--- a/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java
+++ b/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java
@@ -19,13 +19,13 @@
 package org.elasticsearch.test.hamcrest;
+import org.elasticsearch.common.geo.GeoDistance;
+import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.parsers.ShapeParser;
-import org.locationtech.spatial4j.shape.Shape;
-import org.locationtech.spatial4j.shape.ShapeCollection;
-import org.locationtech.spatial4j.shape.impl.GeoCircle;
-import org.locationtech.spatial4j.shape.impl.RectangleImpl;
-import org.locationtech.spatial4j.shape.jts.JtsGeometry;
-import org.locationtech.spatial4j.shape.jts.JtsPoint;
+import org.elasticsearch.common.unit.DistanceUnit;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.hamcrest.Matcher;
+import org.junit.Assert;
 import org.locationtech.jts.geom.Coordinate;
 import org.locationtech.jts.geom.Geometry;
 import org.locationtech.jts.geom.LineString;
@@ -33,12 +33,12 @@
 import org.locationtech.jts.geom.MultiPoint;
 import org.locationtech.jts.geom.MultiPolygon;
 import org.locationtech.jts.geom.Polygon;
-import org.elasticsearch.common.geo.GeoDistance;
-import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.hamcrest.Matcher;
-import org.junit.Assert;
+import org.locationtech.spatial4j.shape.Shape;
+import org.locationtech.spatial4j.shape.ShapeCollection;
+import org.locationtech.spatial4j.shape.impl.GeoCircle;
+import org.locationtech.spatial4j.shape.impl.RectangleImpl;
+import org.locationtech.spatial4j.shape.jts.JtsGeometry;
+import org.locationtech.spatial4j.shape.jts.JtsPoint;
 import java.util.Arrays;
 import java.util.Collections;
@@ -208,9 +208,9 @@ public static void assertEquals(Shape s1, Shape s2) {
         } else if (s1 instanceof ShapeCollection && s2 instanceof ShapeCollection) {
             assertEquals((ShapeCollection)s1, (ShapeCollection)s2);
         } else if (s1 instanceof GeoCircle && s2 instanceof GeoCircle) {
-            Assert.assertEquals((GeoCircle)s1, (GeoCircle)s2);
+            Assert.assertEquals(s1, s2);
         } else if (s1 instanceof RectangleImpl && s2 instanceof RectangleImpl) {
-            Assert.assertEquals((RectangleImpl)s1, (RectangleImpl)s2);
+            Assert.assertEquals(s1, s2);
         } else {
             //We want to know the type of the shape because we test shape equality in a special way...
             //... in particular we test that one ring is equivalent to another ring even if the points are rotated or reversed.
@@ -254,7 +254,7 @@ private static double distance(double lat1, double lon1, double lat2, double lon
         return GeoDistance.ARC.calculate(lat1, lon1, lat2, lon2, DistanceUnit.DEFAULT);
     }
-    public static void assertValidException(XContentParser parser, Class expectedException) {
+    public static void assertValidException(XContentParser parser, Class expectedException) {
         try {
             ShapeParser.parse(parser).build();
             Assert.fail("process completed successfully when " + expectedException.getName() + " expected");
diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
index c4bf2518a9f8f..35dac2e99e00d 100644
--- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
+++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.bootstrap;
 import com.carrotsearch.randomizedtesting.RandomizedRunner;
+
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.common.Booleans;
@@ -175,7 +176,7 @@ public boolean implies(ProtectionDomain domain, Permission permission) {
     /** Add the codebase url of the given classname to the codebases map, if the class exists. */
     private static void addClassCodebase(Map codebases, String name, String classname) {
         try {
-            Class clazz = BootstrapForTesting.class.getClassLoader().loadClass(classname);
+            Class clazz = BootstrapForTesting.class.getClassLoader().loadClass(classname);
             if (codebases.put(name, clazz.getProtectionDomain().getCodeSource().getLocation()) != null) {
                 throw new IllegalStateException("Already added " + name + " codebase for testing");
             }
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
index 0b676e1403481..dd8dd5f81ffc9 100644
--- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
@@ -823,7 +823,7 @@ public void handleException(TransportException exp) {
         });
         try {
-            StringMessageResponse message = res.txGet();
+            res.txGet();
             fail("exception should be thrown");
         } catch (Exception e) {
             assertThat(e, instanceOf(ReceiveTimeoutTransportException.class));
@@ -939,8 +939,8 @@ public void handleException(TransportException exp) {
     }
     public void testTracerLog() throws InterruptedException {
-        TransportRequestHandler handler = (request, channel, task) -> channel.sendResponse(new StringMessageResponse(""));
-        TransportRequestHandler handlerWithError = new TransportRequestHandler() {
+        TransportRequestHandler handler = (request, channel, task) -> channel.sendResponse(new StringMessageResponse(""));
+        TransportRequestHandler handlerWithError = new TransportRequestHandler() {
             @Override
             public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws Exception {
                 if (request.timeout() > 0) {
@@ -952,7 +952,7 @@ public void messageReceived(StringMessageRequest request, TransportChannel chann
         };
         final Semaphore requestCompleted = new Semaphore(0);
-        TransportResponseHandler noopResponseHandler = new TransportResponseHandler() {
+        TransportResponseHandler noopResponseHandler = new TransportResponseHandler() {
             @Override
             public StringMessageResponse newInstance() {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java
index 0bf6601593dee..d3ddac3289999 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.core;
 import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.NamedDiff;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -157,7 +158,6 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPlugin {
-    @SuppressWarnings("OptionalUsedAsFieldOrParameterType")
     static Optional X_PACK_FEATURE = Optional.of("x-pack");
     @Override
@@ -205,7 +205,7 @@ static Settings additionalSettings(final Settings settings, final boolean enable
     }
     @Override
-    public List getClientActions() {
+    public List> getClientActions() {
         return Arrays.asList(
             // deprecation
             DeprecationInfoAction.INSTANCE,
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
index 920081572cfc7..d14c72383d6a8 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
@@ -9,9 +9,9 @@
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.SpecialPermission;
 import org.elasticsearch.Version;
+import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.support.ActionFilter;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.client.transport.TransportClient;
@@ -261,8 +261,8 @@ public Collection createComponents(Client client, ClusterService cluster
     }
     @Override
-    public List getClientActions() {
-        List actions = new ArrayList<>();
+    public List> getClientActions() {
+        List> actions = new ArrayList<>();
         actions.addAll(licensing.getClientActions());
         actions.addAll(super.getClientActions());
         return actions;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java
index 3618b2de4080b..8c6d82f718735 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java
@@ -24,5 +24,6 @@ protected ActionFactory(Logger actionLogger) {
     /**
     * Parses the given xcontent and creates a concrete action
     */
-    public abstract ExecutableAction parseExecutable(String watchId, String actionId, XContentParser parser) throws IOException;
+    public abstract ExecutableAction parseExecutable(String watchId, String actionId, XContentParser parser)
+        throws IOException;
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java
index 47d3500f2e920..f2cdc63c6e94c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java
@@ -40,14 +40,14 @@ public class ActionWrapper implements ToXContentObject {
     @Nullable
     private final ExecutableCondition condition;
     @Nullable
-    private final ExecutableTransform transform;
+    private final ExecutableTransform transform;
     private final ActionThrottler throttler;
-    private final ExecutableAction action;
+    private final ExecutableAction action;
     public ActionWrapper(String id, ActionThrottler throttler, @Nullable ExecutableCondition condition,
-                         @Nullable ExecutableTransform transform,
-                         ExecutableAction action) {
+                         @Nullable ExecutableTransform transform,
+                         ExecutableAction action) {
         this.id = id;
         this.condition = condition;
         this.throttler = throttler;
@@ -63,7 +63,7 @@ public ExecutableCondition condition() {
         return condition;
     }
-    public ExecutableTransform transform() {
+    public ExecutableTransform transform() {
         return transform;
     }
@@ -71,7 +71,7 @@ public Throttler throttler() {
         return throttler;
     }
-    public ExecutableAction action() {
+    public ExecutableAction action() {
         return action;
     }
@@ -196,9 +196,9 @@ static ActionWrapper parse(String watchId, String actionId, XContentParser parse
         assert parser.currentToken() == XContentParser.Token.START_OBJECT;
         ExecutableCondition condition = null;
-        ExecutableTransform transform = null;
+        ExecutableTransform transform = null;
         TimeValue throttlePeriod = null;
-        ExecutableAction action = null;
+        ExecutableAction action = null;
         String currentFieldName = null;
         XContentParser.Token token;
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java
index 40ea8419765ec..16b62cc23de19 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java
@@ -46,7 +46,7 @@ static Aggregations createAggs(List aggsList) {
     }
     @SuppressWarnings("unchecked")
-    static Histogram createHistogramAggregation(String name, List histogramBuckets) {
+    static Histogram createHistogramAggregation(String name, List histogramBuckets) {
         Histogram histogram = mock(Histogram.class);
         when((List)histogram.getBuckets()).thenReturn(histogramBuckets);
         when(histogram.getName()).thenReturn(name);
@@ -72,7 +72,7 @@ static NumericMetricsAggregation.SingleValue createSingleValue(String name, doub
     static Terms createTerms(String name, Term... terms) {
         Terms termsAgg = mock(Terms.class);
         when(termsAgg.getName()).thenReturn(name);
-        List buckets = new ArrayList<>();
+        List buckets = new ArrayList<>();
         for (Term term: terms) {
             StringTerms.Bucket bucket = mock(StringTerms.Bucket.class);
             when(bucket.getKey()).thenReturn(term.key);
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java
index e3d67bb0bdb71..9e8d17e84b44a 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java
@@ -206,7 +206,7 @@ public void testDedicatedMlNode() throws Exception {
         assertBusy(() -> {
             ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
             PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
-            PersistentTask task = tasks.getTask(MlMetadata.jobTaskId(jobId));
+            PersistentTask task = tasks.getTask(MlMetadata.jobTaskId(jobId));
             DiscoveryNode node = clusterState.nodes().resolveNode(task.getExecutorNode());
             assertThat(node.getAttributes(), hasEntry(MachineLearning.ML_ENABLED_NODE_ATTR, "true"));
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java
index 47a168aefad6b..f5a4e34bc67ec 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java
@@ -172,7 +172,7 @@ void finishMock() {
         if (responses.size() > 0) {
             ActionFuture first = wrapResponse(responses.get(0));
             if (responses.size() > 1) {
-                List rest = new ArrayList<>();
+                List> rest = new ArrayList<>();
                 for (int i = 1; i < responses.size(); ++i) {
                     rest.add(wrapResponse(responses.get(i)));
                 }
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java
index cedc65c2ee225..57e5f6cfdb3ff 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java
@@ -229,6 +229,7 @@ private AutodetectProcess mockAutodetectProcessWithOutputStream() throws IOExcep
         return process;
     }
+    @SuppressWarnings("unchecked")
     private AutodetectCommunicator createAutodetectCommunicator(ExecutorService executorService, AutodetectProcess autodetectProcess,
                                                                 AutoDetectResultProcessor autoDetectResultProcessor, Consumer finishHandler) throws IOException {
@@ -242,12 +243,13 @@ private AutodetectCommunicator createAutodetectCommunicator(ExecutorService exec
             new NamedXContentRegistry(Collections.emptyList()), executorService);
     }
+    @SuppressWarnings("unchecked")
     private AutodetectCommunicator createAutodetectCommunicator(AutodetectProcess autodetectProcess,
                                                                 AutoDetectResultProcessor autoDetectResultProcessor) throws IOException {
         ExecutorService executorService = mock(ExecutorService.class);
         when(executorService.submit(any(Callable.class))).thenReturn(mock(Future.class));
         doAnswer(invocationOnMock -> {
-            Callable runnable = (Callable) invocationOnMock.getArguments()[0];
+            Callable runnable = (Callable) invocationOnMock.getArguments()[0];
             runnable.call();
             return mock(Future.class);
         }).when(executorService).submit(any(Callable.class));
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java
index fa41cf0918f71..a1b9aad452b9e 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java
@@ -31,7 +31,6 @@
 import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
 import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
 import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig;
-import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams;
 import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
 import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
 import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
@@ -42,6 +41,7 @@
 import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister;
 import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
 import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister;
+import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams;
 import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams;
 import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams;
 import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange;
@@ -202,6 +202,7 @@ public void testOpenJob() {
         verify(jobTask).updatePersistentTaskState(eq(new JobTaskState(JobState.OPENED, 1L)), any());
     }
+    @SuppressWarnings("unchecked")
     public void testOpenJob_exceedMaxNumJobs() {
         when(jobManager.getJobOrThrowIfUnknown("foo")).thenReturn(createJobDetails("foo"));
         when(jobManager.getJobOrThrowIfUnknown("bar")).thenReturn(createJobDetails("bar"));
@@ -214,7 +215,7 @@ public void testOpenJob_exceedMaxNumJobs() {
         ThreadPool.Cancellable cancellable = mock(ThreadPool.Cancellable.class);
         when(threadPool.scheduleWithFixedDelay(any(), any(), any())).thenReturn(cancellable);
         ExecutorService executorService = mock(ExecutorService.class);
-        Future future = mock(Future.class);
+        Future future = mock(Future.class);
         when(executorService.submit(any(Callable.class))).thenReturn(future);
         when(threadPool.executor(anyString())).thenReturn(EsExecutors.newDirectExecutorService());
         AutodetectProcess autodetectProcess = mock(AutodetectProcess.class);
@@ -230,7 +231,6 @@ public void testOpenJob_exceedMaxNumJobs() {
         doReturn(executorService).when(manager).createAutodetectExecutorService(any());
         doAnswer(invocationOnMock -> {
-            @SuppressWarnings("unchecked")
             CheckedConsumer consumer = (CheckedConsumer) invocationOnMock.getArguments()[2];
             consumer.accept(null);
             return null;
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java
index 19760ccab0202..09de32643ed93 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java
@@ -403,7 +403,7 @@ private IllegalArgumentException illegalArgument(String message) {
     }
     private static String getAction(BulkItemRequest item) {
-        final DocWriteRequest docWriteRequest = item.request();
+        final DocWriteRequest docWriteRequest = item.request();
         switch (docWriteRequest.opType()) {
             case INDEX:
             case CREATE:
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java
index 67e21aadcbceb..5d9176b18976e 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java
@@ -129,14 +129,14 @@ private static boolean isInternalAction(String action) {
     */
    public static class AsyncAuthorizer {
-        private final ActionListener listener;
+        private final ActionListener listener;
         private final BiConsumer consumer;
         private final Authentication authentication;
         private volatile Role userRoles;
         private volatile Role runAsRoles;
         private CountDown countDown = new CountDown(2); // we expect only two responses!!
-        public AsyncAuthorizer(Authentication authentication, ActionListener listener, BiConsumer consumer) {
+        public AsyncAuthorizer(Authentication authentication, ActionListener listener, BiConsumer consumer) {
             this.consumer = consumer;
             this.listener = listener;
             this.authentication = authentication;
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java
index 77bf8e6a4008e..7d4469133687e 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java
@@ -37,9 +37,9 @@
 import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.nullValue;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.iterableWithSize;
+import static org.hamcrest.Matchers.nullValue;
 public class AuditTrailTests extends SecurityIntegTestCase {
@@ -163,7 +163,7 @@ private Collection> getAuditEvents() throws Exception {
             .request();
         request.indicesOptions().ignoreUnavailable();
-        final PlainActionFuture>> listener = new PlainActionFuture();
+        final PlainActionFuture>> listener = new PlainActionFuture<>();
         ScrollHelper.fetchAllByEntity(client, request, listener, SearchHit::getSourceAsMap);
         return listener.get();
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
index cd685b8f34c28..bb32ed699950c 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
@@ -199,7 +199,6 @@ public void shutdownThreadpool() throws InterruptedException {
         }
     }
-    @SuppressWarnings("unchecked")
     public void testTokenFirstMissingSecondFound() throws Exception {
         when(firstRealm.token(threadContext)).thenReturn(null);
         when(secondRealm.token(threadContext)).thenReturn(token);
@@ -227,7 +226,6 @@ public void testTokenMissing() throws Exception {
         verifyNoMoreInteractions(auditTrail);
     }
-    @SuppressWarnings("unchecked")
    public void testAuthenticateBothSupportSecondSucceeds() throws Exception {
         User user = new User("_username", "r1");
         when(firstRealm.supports(token)).thenReturn(true);
@@ -698,7 +696,7 @@ public void testRunAsLookupSameRealm() throws Exception {
         mockAuthenticate(secondRealm, token, user);
         mockRealmLookupReturnsNull(firstRealm, "run_as");
         doAnswer((i) -> {
-            ActionListener listener = (ActionListener) i.getArguments()[1];
+            ActionListener listener = (ActionListener) i.getArguments()[1];
             listener.onResponse(new User("looked up user", new String[]{"some role"}));
             return null;
         }).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class));
@@ -735,6 +733,7 @@ public void testRunAsLookupSameRealm() throws Exception {
         assertTrue(completed.get());
     }
+    @SuppressWarnings("unchecked")
     public void testRunAsLookupDifferentRealm() throws Exception {
         AuthenticationToken token = mock(AuthenticationToken.class);
         threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as");
@@ -742,7 +741,7 @@ public void testRunAsLookupDifferentRealm() throws Exception {
         when(secondRealm.supports(token)).thenReturn(true);
         mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"}));
         doAnswer((i) -> {
-            ActionListener listener = (ActionListener) i.getArguments()[1];
+            ActionListener listener = (ActionListener) i.getArguments()[1];
             listener.onResponse(new User("looked up user", new String[]{"some role"}));
             return null;
         }).when(firstRealm).lookupUser(eq("run_as"), any(ActionListener.class));
@@ -805,6 +804,7 @@ public void testRunAsWithEmptyRunAsUsername() throws Exception {
         }
     }
+    @SuppressWarnings("unchecked")
     public void testAuthenticateTransportDisabledRunAsUser() throws Exception {
         AuthenticationToken token = mock(AuthenticationToken.class);
         threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as");
@@ -813,7 +813,7 @@ public void testAuthenticateTransportDisabledRunAsUser() throws Exception {
         mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"}));
         mockRealmLookupReturnsNull(firstRealm, "run_as");
         doAnswer((i) -> {
-            ActionListener listener = (ActionListener) i.getArguments()[1];
+            ActionListener listener = (ActionListener) i.getArguments()[1];
             listener.onResponse(new User("looked up user", new String[]{"some role"}, null, null, null, false));
             return null;
         }).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class));
@@ -833,7 +833,8 @@ public void testAuthenticateRestDisabledRunAsUser() throws Exception {
         mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"}));
         mockRealmLookupReturnsNull(firstRealm, "run_as");
         doAnswer((i) -> {
-            ActionListener listener = (ActionListener) i.getArguments()[1];
+            @SuppressWarnings("unchecked")
+            ActionListener listener = (ActionListener) i.getArguments()[1];
             listener.onResponse(new User("looked up user", new String[]{"some role"}, null, null, null, false));
             return null;
         }).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class));
@@ -984,9 +985,10 @@ void assertThreadContextContainsAuthentication(Authentication authentication) th
         assertThat(threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY), equalTo((Object) authentication.encode()));
     }
+    @SuppressWarnings("unchecked")
     private void mockAuthenticate(Realm realm, AuthenticationToken token, User user) {
         doAnswer((i) -> {
-            ActionListener listener = (ActionListener) i.getArguments()[1];
+            ActionListener listener = (ActionListener) i.getArguments()[1];
             if (user == null) {
                 listener.onResponse(AuthenticationResult.notHandled());
             } else {
@@ -1008,9 +1010,10 @@ private Authentication authenticateBlocking(String action, TransportMessage mess
         return future.actionGet();
     }
+    @SuppressWarnings("unchecked")
     private static void mockRealmLookupReturnsNull(Realm realm, String username) {
         doAnswer((i) -> {
-            ActionListener listener = (ActionListener) i.getArguments()[1];
+            ActionListener listener = (ActionListener) i.getArguments()[1];
             listener.onResponse(null);
             return null;
         }).when(realm).lookupUser(eq(username), any(ActionListener.class));
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
index bcd31c32f7f78..11ee0a6a0012e 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
@@ -168,6 +168,7 @@ public class AuthorizationServiceTests extends ESTestCase {
     private Map roleMap = new HashMap<>();
     private CompositeRolesStore rolesStore;
+    @SuppressWarnings("unchecked")
     @Before
     public void setup() {
         rolesStore = mock(CompositeRolesStore.class);
@@ -208,7 +209,7 @@ public void setup() {
     }
     private void authorize(Authentication authentication, String action, TransportRequest request) {
-        PlainActionFuture future = new PlainActionFuture();
+        PlainActionFuture future = new PlainActionFuture<>();
         AuthorizationUtils.AsyncAuthorizer authorizer = new AuthorizationUtils.AsyncAuthorizer(authentication, future,
             (userRoles, runAsRoles) -> {
                 authorizationService.authorize(authentication, action, request, userRoles, runAsRoles);
@@ -598,7 +599,6 @@ public void testAuditTrailIsRecordedWhenIndexWildcardThrowsError() {
     public void testRunAsRequestWithNoRolesUser() {
         final TransportRequest request = mock(TransportRequest.class);
         final Authentication authentication = createAuthentication(new User("run as me", null, new User("test user", "admin")));
-        final User user = new User("run as me", null, new User("test user", "admin"));
         assertNotEquals(authentication.getUser().authenticatedUser(), authentication);
         assertThrowsAuthorizationExceptionRunAs(
             () -> authorize(authentication, "indices:a", request),
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java
index 460725c3dda98..732653d829307 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java
@@ -33,7 +33,7 @@ public InputRegistry(Settings settings, Map factories) {
     * @param parser The parser containing the input definition
     * @return A new input instance from the parser
     */
-    public ExecutableInput parse(String watchId, XContentParser parser) throws IOException {
+    public ExecutableInput parse(String watchId, XContentParser parser) throws IOException {
         String type = null;
         if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
@@ -42,7 +42,7 @@ public ExecutableInput parse(String watchId, XContentParser parser) throws IOExc
         }
         XContentParser.Token token;
-        ExecutableInput input = null;
+        ExecutableInput input = null;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 type = parser.currentName();
@@ -50,7 +50,7 @@ public ExecutableInput parse(String watchId, XContentParser parser) throws IOExc
                 throw new ElasticsearchParseException("could not parse input for watch [{}]. expected field indicating the input type, " +
                     "but found [{}] instead", watchId, token);
             } else if (token == XContentParser.Token.START_OBJECT) {
-                InputFactory factory = factories.get(type);
+                InputFactory factory = factories.get(type);
                 if (factory == null) {
                     throw new ElasticsearchParseException("could not parse input for watch [{}]. unknown input type [{}]", watchId, type);
                 }
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java
index 7754e622d5a6b..a81868f05edfc 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java
@@ -8,6 +8,7 @@
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.core.watcher.actions.Action;
 import org.elasticsearch.xpack.core.watcher.actions.ActionStatus;
+import org.elasticsearch.xpack.core.watcher.actions.ActionStatus.AckStatus.State;
 import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper;
 import org.elasticsearch.xpack.core.watcher.actions.ActionWrapperResult;
 import org.elasticsearch.xpack.core.watcher.actions.ExecutableAction;
@@ -22,7 +23,6 @@
 import java.util.HashMap;
 import java.util.Map;
-import static org.elasticsearch.xpack.core.watcher.actions.ActionStatus.AckStatus.State;
 import static org.hamcrest.Matchers.is;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.mock;
@@ -32,7 +32,8 @@ public class ActionWrapperTests extends ESTestCase {
     private DateTime now = DateTime.now(DateTimeZone.UTC);
     private Watch watch = mock(Watch.class);
-    private ExecutableAction executableAction = mock(ExecutableAction.class);
+    @SuppressWarnings("unchecked")
+    private ExecutableAction executableAction = mock(ExecutableAction.class);
     private ActionWrapper actionWrapper = new ActionWrapper("_action", null, NeverCondition.INSTANCE, null, executableAction);
     public void testThatUnmetActionConditionResetsAckStatus() throws Exception {
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java
index bc22d58917931..05256ba5fc476 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java
@@ -57,7 +57,7 @@ public void testSingleActionAckThrottle() throws Exception {
             .trigger(schedule(interval("60m")));
         AvailableAction availableAction = randomFrom(AvailableAction.values());
-        Action.Builder action = availableAction.action();
+        Action.Builder action = availableAction.action();
         watchSourceBuilder.addAction("test_id", action);
         watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder.buildAsBytes(XContentType.JSON),
@@ -98,7 +98,7 @@ public void testRandomMultiActionAckThrottle() throws Exception {
         Set ackingActions = new HashSet<>();
         for (int i = 0; i < scaledRandomIntBetween(5,10); ++i) {
             AvailableAction availableAction = randomFrom(AvailableAction.values());
-            Action.Builder action = availableAction.action();
+            Action.Builder action = availableAction.action();
             watchSourceBuilder.addAction("test_id" + i, action);
             if (randomBoolean()) {
                 ackingActions.add("test_id" + i);
@@ -352,7 +352,7 @@ public void testFailingActionDoesGetThrottled() throws Exception {
     enum AvailableAction {
         EMAIL {
             @Override
-            public Action.Builder action() throws Exception {
+            public Action.Builder action() throws Exception {
                 EmailTemplate.Builder emailBuilder = EmailTemplate.builder();
                 emailBuilder.from("test@test.com");
                 emailBuilder.to("test@test.com");
@@ -367,7 +367,7 @@ public String type() {
         },
         WEBHOOK {
             @Override
-            public Action.Builder action() throws Exception {
+            public Action.Builder action() throws Exception {
                 HttpRequestTemplate.Builder requestBuilder = HttpRequestTemplate.builder("localhost", 1234)
                     .path("/")
                     .method(HttpMethod.GET);
@@ -381,7 +381,7 @@ public String type() {
         },
         LOGGING {
             @Override
-            public Action.Builder action() throws Exception {
+            public Action.Builder action() throws Exception {
                 return LoggingAction.builder(new TextTemplate("_logging"));
             }
@@ -392,7 +392,7 @@ public String type() {
         },
         INDEX {
             @Override
-            public Action.Builder action() throws Exception {
+            public Action.Builder action() throws Exception {
                 return IndexAction.builder("test_index", "test_type");
             }
@@ -402,7 +402,7 @@ public String type() {
         }
     };
-        public abstract Action.Builder action() throws Exception;
+        public abstract Action.Builder action() throws Exception;
         public abstract String type();
 }
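
[Editor's note, not part of the patch above] The hunks in this patch all apply the same two-step cleanup: give raw types their type parameters, and where Mockito forces an unchecked cast (InvocationOnMock.getArguments() only returns Object[]) confine that cast behind a single @SuppressWarnings("unchecked"). The sketch below is illustrative only; UserLookup and mockLookupReturning are hypothetical stand-ins for the Realm.lookupUser-style callbacks stubbed in AuthenticationServiceTests, and the sketch assumes the same Mockito version the tests already import (org.mockito.Matchers).

import org.elasticsearch.action.ActionListener;

import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

public class ListenerGenericsSketch {

    // Hypothetical stand-in for the Realm.lookupUser style callback stubbed in the tests above.
    interface UserLookup {
        void lookupUser(String username, ActionListener<String> listener);
    }

    // Mirrors the pattern applied throughout the hunks: the listener keeps its type parameter,
    // and the unavoidable cast from Mockito's Object[] argument array is confined behind a
    // single @SuppressWarnings("unchecked") on the helper instead of leaving declarations raw.
    @SuppressWarnings("unchecked")
    static UserLookup mockLookupReturning(String user) {
        UserLookup lookup = mock(UserLookup.class);
        doAnswer(invocation -> {
            ActionListener<String> listener = (ActionListener<String>) invocation.getArguments()[1];
            listener.onResponse(user); // hand the canned result to whatever listener the caller passed in
            return null;
        }).when(lookup).lookupUser(any(String.class), any(ActionListener.class));
        return lookup;
    }
}

In the patch itself the same idea shows up as the ActionListener casts inside the doAnswer blocks of AuthenticationServiceTests, paired with method-level @SuppressWarnings("unchecked") annotations added in the + lines.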