diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index 09c587cf81f23..81b1f6b570969 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -28,6 +28,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.client.RequestConverters.EndpointBuilder; import org.elasticsearch.client.ml.CloseJobRequest; +import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; import org.elasticsearch.client.ml.FlushJobRequest; @@ -195,6 +196,19 @@ static Request putDatafeed(PutDatafeedRequest putDatafeedRequest) throws IOExcep return request; } + static Request deleteDatafeed(DeleteDatafeedRequest deleteDatafeedRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("datafeeds") + .addPathPart(deleteDatafeedRequest.getDatafeedId()) + .build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + RequestConverters.Params params = new RequestConverters.Params(request); + params.putParam("force", Boolean.toString(deleteDatafeedRequest.isForce())); + return request; + } + static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) throws IOException { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index 79f9267c94d18..4d2167ce063d8 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -22,9 +22,9 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.CloseJobResponse; +import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; -import org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.ForecastJobRequest; @@ -204,11 +204,11 @@ public void getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, * @return action acknowledgement * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { + public AcknowledgedResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, MLRequestConverters::deleteJob, options, - DeleteJobResponse::fromXContent, + AcknowledgedResponse::fromXContent, Collections.emptySet()); } @@ -222,11 +222,11 @@ public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions opti * @param options Additional request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion */ - public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener listener) { + public void deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(request, MLRequestConverters::deleteJob, options, - DeleteJobResponse::fromXContent, + AcknowledgedResponse::fromXContent, listener, Collections.emptySet()); } @@ -492,6 +492,46 @@ public void putDatafeedAsync(PutDatafeedRequest request, RequestOptions options, Collections.emptySet()); } + /** + * Deletes the given Machine Learning Datafeed + *

+ * <p> + * For additional info + * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-datafeed.html"> + *     ML Delete Datafeed documentation</a> + * </p> + *
+ * @param request The request to delete the datafeed + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return action acknowledgement + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public AcknowledgedResponse deleteDatafeed(DeleteDatafeedRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::deleteDatafeed, + options, + AcknowledgedResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Deletes the given Machine Learning Datafeed asynchronously and notifies the listener on completion + *

+ * <p> + * For additional info + * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-datafeed.html"> + *     ML Delete Datafeed documentation</a> + * </p> + *
+ * @param request The request to delete the datafeed + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void deleteDatafeedAsync(DeleteDatafeedRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::deleteDatafeed, + options, + AcknowledgedResponse::fromXContent, + listener, + Collections.emptySet()); + } + /** * Deletes Machine Learning Job Forecasts * diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java new file mode 100644 index 0000000000000..1454bb590c3b9 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; + +import java.util.Objects; + +/** + * Request to delete a Machine Learning Datafeed via its ID + */ +public class DeleteDatafeedRequest extends ActionRequest { + + private String datafeedId; + private boolean force; + + public DeleteDatafeedRequest(String datafeedId) { + this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null"); + } + + public String getDatafeedId() { + return datafeedId; + } + + public boolean isForce() { + return force; + } + + /** + * Used to forcefully delete a started datafeed. + * This method is quicker than stopping and deleting the datafeed. + * + * @param force When {@code true} forcefully delete a started datafeed. 
Defaults to {@code false} + */ + public void setForce(boolean force) { + this.force = force; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public int hashCode() { + return Objects.hash(datafeedId, force); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || obj.getClass() != getClass()) { + return false; + } + + DeleteDatafeedRequest other = (DeleteDatafeedRequest) obj; + return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(force, other.force); + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java deleted file mode 100644 index 86cafd9e09315..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response acknowledging the Machine Learning Job request - */ -public class DeleteJobResponse extends AcknowledgedResponse { - - public DeleteJobResponse(boolean acknowledged) { - super(acknowledged); - } - - public DeleteJobResponse() { - } - - public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException { - AcknowledgedResponse response = AcknowledgedResponse.fromXContent(parser); - return new DeleteJobResponse(response.isAcknowledged()); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteJobResponse that = (DeleteJobResponse) other; - return isAcknowledged() == that.isAcknowledged(); - } - - @Override - public int hashCode() { - return Objects.hash(isAcknowledged()); - } - -} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index 19db672e35bcc..547bc2e9a934f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -24,6 +24,7 @@ import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.ml.CloseJobRequest; +import org.elasticsearch.client.ml.DeleteDatafeedRequest; import 
org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; import org.elasticsearch.client.ml.FlushJobRequest; @@ -223,6 +224,20 @@ public void testPutDatafeed() throws IOException { } } + public void testDeleteDatafeed() { + String datafeedId = randomAlphaOfLength(10); + DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId); + + Request request = MLRequestConverters.deleteDatafeed(deleteDatafeedRequest); + assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/datafeeds/" + datafeedId, request.getEndpoint()); + assertEquals(Boolean.toString(false), request.getParameters().get("force")); + + deleteDatafeedRequest.setForce(true); + request = MLRequestConverters.deleteDatafeed(deleteDatafeedRequest); + assertEquals(Boolean.toString(true), request.getParameters().get("force")); + } + public void testDeleteForecast() throws Exception { String jobId = randomAlphaOfLength(10); DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index c0bf1055058a5..a07b441484386 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -25,9 +25,9 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.CloseJobResponse; +import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; -import org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.ForecastJobRequest; @@ -129,7 +129,7 @@ public void testDeleteJob() throws Exception { MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); - DeleteJobResponse response = execute(new DeleteJobRequest(jobId), + AcknowledgedResponse response = execute(new DeleteJobRequest(jobId), machineLearningClient::deleteJob, machineLearningClient::deleteJobAsync); @@ -312,6 +312,22 @@ public void testPutDatafeed() throws Exception { assertThat(createdDatafeed.getIndices(), equalTo(datafeedConfig.getIndices())); } + public void testDeleteDatafeed() throws Exception { + String jobId = randomValidJobId(); + Job job = buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + String datafeedId = "datafeed-" + jobId; + DatafeedConfig datafeedConfig = DatafeedConfig.builder(datafeedId, jobId).setIndices("some_data_index").build(); + execute(new PutDatafeedRequest(datafeedConfig), machineLearningClient::putDatafeed, machineLearningClient::putDatafeedAsync); + + AcknowledgedResponse response = execute(new DeleteDatafeedRequest(datafeedId), machineLearningClient::deleteDatafeed, + machineLearningClient::deleteDatafeedAsync); + + assertTrue(response.isAcknowledged()); + } + public void testDeleteForecast() throws Exception { String jobId = "test-delete-forecast"; diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 3e43792ac6a5a..09d32710eb176 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -34,9 +34,9 @@ import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.CloseJobResponse; +import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; -import org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.ForecastJobRequest; @@ -264,7 +264,7 @@ public void testDeleteJob() throws Exception { //tag::x-pack-delete-ml-job-request DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job"); deleteJobRequest.setForce(false); //<1> - DeleteJobResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT); + AcknowledgedResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT); //end::x-pack-delete-ml-job-request //tag::x-pack-delete-ml-job-response @@ -273,9 +273,9 @@ public void testDeleteJob() throws Exception { } { //tag::x-pack-delete-ml-job-request-listener - ActionListener listener = new ActionListener() { + ActionListener listener = new ActionListener() { @Override - public void onResponse(DeleteJobResponse deleteJobResponse) { + public void onResponse(AcknowledgedResponse acknowledgedResponse) { // <1> } @@ -587,6 +587,61 @@ public void onFailure(Exception e) { } } + public void testDeleteDatafeed() throws Exception { + RestHighLevelClient client = highLevelClient(); + + String jobId = "test-delete-datafeed-job"; + Job job = MachineLearningIT.buildJob(jobId); + client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + String datafeedId = "test-delete-datafeed"; + DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId).setIndices("foo").build(); + client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); + + { + //tag::x-pack-delete-ml-datafeed-request + DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId); + deleteDatafeedRequest.setForce(false); //<1> + AcknowledgedResponse deleteDatafeedResponse = client.machineLearning().deleteDatafeed( + deleteDatafeedRequest, RequestOptions.DEFAULT); + //end::x-pack-delete-ml-datafeed-request + + //tag::x-pack-delete-ml-datafeed-response + boolean isAcknowledged = deleteDatafeedResponse.isAcknowledged(); //<1> + //end::x-pack-delete-ml-datafeed-response + } + + // Recreate datafeed to allow second deletion + client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); + + { + //tag::x-pack-delete-ml-datafeed-request-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + //end::x-pack-delete-ml-datafeed-request-listener + + 
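+ // The tag::/end:: markers above and below delimit the snippets that docs/java-rest/high-level/ml/delete-datafeed.asciidoc pulls in via include-tagged.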
// Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + //tag::x-pack-delete-ml-datafeed-request-async + DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId); + client.machineLearning().deleteDatafeedAsync(deleteDatafeedRequest, RequestOptions.DEFAULT, listener); // <1> + //end::x-pack-delete-ml-datafeed-request-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testGetBuckets() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteDatafeedRequestTests.java similarity index 51% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobResponseTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteDatafeedRequestTests.java index 2eb4d51e19180..e36aa855a7b59 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteJobResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/DeleteDatafeedRequestTests.java @@ -18,25 +18,25 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests; +import org.elasticsearch.test.ESTestCase; -import java.io.IOException; +public class DeleteDatafeedRequestTests extends ESTestCase { -public class DeleteJobResponseTests extends AbstractXContentTestCase { - - @Override - protected DeleteJobResponse createTestInstance() { - return new DeleteJobResponse(); + public void testConstructor_GivenNullId() { + NullPointerException ex = expectThrows(NullPointerException.class, () -> new DeleteDatafeedRequest(null)); + assertEquals("[datafeed_id] must not be null", ex.getMessage()); } - @Override - protected DeleteJobResponse doParseInstance(XContentParser parser) throws IOException { - return DeleteJobResponse.fromXContent(parser); + public void testSetForce() { + DeleteDatafeedRequest deleteDatafeedRequest = createTestInstance(); + assertFalse(deleteDatafeedRequest.isForce()); + + deleteDatafeedRequest.setForce(true); + assertTrue(deleteDatafeedRequest.isForce()); } - @Override - protected boolean supportsUnknownFields() { - return false; + private DeleteDatafeedRequest createTestInstance() { + return new DeleteDatafeedRequest(DatafeedConfigTests.randomValidDatafeedId()); } } diff --git a/docs/java-rest/high-level/ml/delete-datafeed.asciidoc b/docs/java-rest/high-level/ml/delete-datafeed.asciidoc new file mode 100644 index 0000000000000..68741651b33d4 --- /dev/null +++ b/docs/java-rest/high-level/ml/delete-datafeed.asciidoc @@ -0,0 +1,49 @@ +[[java-rest-high-x-pack-ml-delete-datafeed]] +=== Delete Datafeed API + +[[java-rest-high-x-pack-machine-learning-delete-datafeed-request]] +==== Delete Datafeed Request + +A `DeleteDatafeedRequest` object requires a non-null `datafeedId` and can optionally set `force`.
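+As a rough sketch of the intended usage (assuming a configured `RestHighLevelClient` is available as `client`, and using a placeholder datafeed ID), the request can be built and executed like this; the tagged snippet below remains the canonical, tested example:
+
+["source","java"]
+---------------------------------------------------
+// "my-datafeed" is a placeholder datafeed ID; `client` is an existing RestHighLevelClient
+DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest("my-datafeed");
+deleteDatafeedRequest.setForce(false); // optional, defaults to false
+AcknowledgedResponse deleteDatafeedResponse =
+    client.machineLearning().deleteDatafeed(deleteDatafeedRequest, RequestOptions.DEFAULT);
+boolean acknowledged = deleteDatafeedResponse.isAcknowledged(); // true if the datafeed was removed
+---------------------------------------------------
+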
+Can be executed as follows: + +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request] +--------------------------------------------------- +<1> Use to forcefully delete a started datafeed; +this method is quicker than stopping and deleting the datafeed. +Defaults to `false`. + +[[java-rest-high-x-pack-machine-learning-delete-datafeed-response]] +==== Delete Datafeed Response + +The returned `AcknowledgedResponse` object indicates the acknowledgement of the request: +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-response] +--------------------------------------------------- +<1> `isAcknowledged` was the deletion request acknowledged or not + +[[java-rest-high-x-pack-machine-learning-delete-datafeed-async]] +==== Delete Datafeed Asynchronously + +This request can also be made asynchronously. +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request-async] +--------------------------------------------------- +<1> The `DeleteDatafeedRequest` to execute and the `ActionListener` to alert on completion or error. + +The deletion request returns immediately. Once the request is completed, the `ActionListener` is +called back using the `onResponse` or `onFailure`. The latter indicates some failure occurred when +making the request. + +A typical listener for a `DeleteDatafeedRequest` could be defined as follows: + +["source","java",subs="attributes,callouts,macros"] +--------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-datafeed-request-listener] +--------------------------------------------------- +<1> The action to be taken when it is completed +<2> What to do when a failure occurs diff --git a/docs/java-rest/high-level/ml/delete-job.asciidoc b/docs/java-rest/high-level/ml/delete-job.asciidoc index 44a6a47940955..43f1e2fb02bbf 100644 --- a/docs/java-rest/high-level/ml/delete-job.asciidoc +++ b/docs/java-rest/high-level/ml/delete-job.asciidoc @@ -18,7 +18,7 @@ Defaults to `false` [[java-rest-high-x-pack-machine-learning-delete-job-response]] ==== Delete Job Response -The returned `DeleteJobResponse` object indicates the acknowledgement of the request: +The returned `AcknowledgedResponse` object indicates the acknowledgement of the request: ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-delete-ml-job-response] diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 0be681a14d1fc..cb297d0f712dc 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -221,6 +221,7 @@ The Java High Level REST Client supports the following Machine Learning APIs: * <> * <> * <> +* <> * <> * <> * <> @@ -238,6 +239,7 @@ include::ml/close-job.asciidoc[] include::ml/update-job.asciidoc[] include::ml/flush-job.asciidoc[] include::ml/put-datafeed.asciidoc[] +include::ml/delete-datafeed.asciidoc[] include::ml/get-job-stats.asciidoc[] include::ml/forecast-job.asciidoc[] 
include::ml/delete-forecast.asciidoc[] diff --git a/docs/reference/images/sql/client-apps/dbeaver-1-new-conn.png b/docs/reference/images/sql/client-apps/dbeaver-1-new-conn.png new file mode 100644 index 0000000000000..2307f03932663 Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbeaver-1-new-conn.png differ diff --git a/docs/reference/images/sql/client-apps/dbeaver-2-conn-es.png b/docs/reference/images/sql/client-apps/dbeaver-2-conn-es.png new file mode 100644 index 0000000000000..1ca209a57e555 Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbeaver-2-conn-es.png differ diff --git a/docs/reference/images/sql/client-apps/dbeaver-3-conn-props.png b/docs/reference/images/sql/client-apps/dbeaver-3-conn-props.png new file mode 100644 index 0000000000000..7561e94bdd991 Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbeaver-3-conn-props.png differ diff --git a/docs/reference/images/sql/client-apps/dbeaver-4-driver-ver.png b/docs/reference/images/sql/client-apps/dbeaver-4-driver-ver.png new file mode 100644 index 0000000000000..62cef87a7ae9d Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbeaver-4-driver-ver.png differ diff --git a/docs/reference/images/sql/client-apps/dbeaver-5-test-conn.png b/docs/reference/images/sql/client-apps/dbeaver-5-test-conn.png new file mode 100644 index 0000000000000..70f2a1dd4dc2f Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbeaver-5-test-conn.png differ diff --git a/docs/reference/images/sql/client-apps/dbeaver-6-data.png b/docs/reference/images/sql/client-apps/dbeaver-6-data.png new file mode 100644 index 0000000000000..5d33441fe3b8c Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbeaver-6-data.png differ diff --git a/docs/reference/images/sql/client-apps/dbvis-1-driver-manager.png b/docs/reference/images/sql/client-apps/dbvis-1-driver-manager.png new file mode 100644 index 0000000000000..b0ff89cc9d75a Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbvis-1-driver-manager.png differ diff --git a/docs/reference/images/sql/client-apps/dbvis-2-driver.png b/docs/reference/images/sql/client-apps/dbvis-2-driver.png new file mode 100644 index 0000000000000..b0f3a2927c968 Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbvis-2-driver.png differ diff --git a/docs/reference/images/sql/client-apps/dbvis-3-new-conn.png b/docs/reference/images/sql/client-apps/dbvis-3-new-conn.png new file mode 100644 index 0000000000000..7f89cf84a8e62 Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbvis-3-new-conn.png differ diff --git a/docs/reference/images/sql/client-apps/dbvis-4-conn-props.png b/docs/reference/images/sql/client-apps/dbvis-4-conn-props.png new file mode 100644 index 0000000000000..2027949c401a7 Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbvis-4-conn-props.png differ diff --git a/docs/reference/images/sql/client-apps/dbvis-5-data.png b/docs/reference/images/sql/client-apps/dbvis-5-data.png new file mode 100644 index 0000000000000..fb5ce8b86aa74 Binary files /dev/null and b/docs/reference/images/sql/client-apps/dbvis-5-data.png differ diff --git a/docs/reference/images/sql/client-apps/squirell-1-view-drivers.png b/docs/reference/images/sql/client-apps/squirell-1-view-drivers.png new file mode 100644 index 0000000000000..22abbbed741ee Binary files /dev/null and b/docs/reference/images/sql/client-apps/squirell-1-view-drivers.png differ diff --git 
a/docs/reference/images/sql/client-apps/squirell-2-new-driver.png b/docs/reference/images/sql/client-apps/squirell-2-new-driver.png new file mode 100644 index 0000000000000..756b308a6a306 Binary files /dev/null and b/docs/reference/images/sql/client-apps/squirell-2-new-driver.png differ diff --git a/docs/reference/images/sql/client-apps/squirell-3-add-driver.png b/docs/reference/images/sql/client-apps/squirell-3-add-driver.png new file mode 100644 index 0000000000000..9a9c2c2634e3c Binary files /dev/null and b/docs/reference/images/sql/client-apps/squirell-3-add-driver.png differ diff --git a/docs/reference/images/sql/client-apps/squirell-4-driver-list.png b/docs/reference/images/sql/client-apps/squirell-4-driver-list.png new file mode 100644 index 0000000000000..35f389747c970 Binary files /dev/null and b/docs/reference/images/sql/client-apps/squirell-4-driver-list.png differ diff --git a/docs/reference/images/sql/client-apps/squirell-5-add-alias.png b/docs/reference/images/sql/client-apps/squirell-5-add-alias.png new file mode 100644 index 0000000000000..d5587060d2eaa Binary files /dev/null and b/docs/reference/images/sql/client-apps/squirell-5-add-alias.png differ diff --git a/docs/reference/images/sql/client-apps/squirell-6-alias-props.png b/docs/reference/images/sql/client-apps/squirell-6-alias-props.png new file mode 100644 index 0000000000000..a43e5b5be6927 Binary files /dev/null and b/docs/reference/images/sql/client-apps/squirell-6-alias-props.png differ diff --git a/docs/reference/images/sql/client-apps/squirell-7-data.png b/docs/reference/images/sql/client-apps/squirell-7-data.png new file mode 100644 index 0000000000000..760ade7c670fb Binary files /dev/null and b/docs/reference/images/sql/client-apps/squirell-7-data.png differ diff --git a/docs/reference/images/sql/client-apps/workbench-1-manage-drivers.png b/docs/reference/images/sql/client-apps/workbench-1-manage-drivers.png new file mode 100644 index 0000000000000..e305fd2a9dda2 Binary files /dev/null and b/docs/reference/images/sql/client-apps/workbench-1-manage-drivers.png differ diff --git a/docs/reference/images/sql/client-apps/workbench-2-add-driver.png b/docs/reference/images/sql/client-apps/workbench-2-add-driver.png new file mode 100644 index 0000000000000..03e740f400ae1 Binary files /dev/null and b/docs/reference/images/sql/client-apps/workbench-2-add-driver.png differ diff --git a/docs/reference/images/sql/client-apps/workbench-3-connection.png b/docs/reference/images/sql/client-apps/workbench-3-connection.png new file mode 100644 index 0000000000000..32643375e3de9 Binary files /dev/null and b/docs/reference/images/sql/client-apps/workbench-3-connection.png differ diff --git a/docs/reference/images/sql/client-apps/workbench-4-data.png b/docs/reference/images/sql/client-apps/workbench-4-data.png new file mode 100644 index 0000000000000..602f09d06e46f Binary files /dev/null and b/docs/reference/images/sql/client-apps/workbench-4-data.png differ diff --git a/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc b/docs/reference/security/securing-communications/configuring-tls-docker.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc rename to docs/reference/security/securing-communications/configuring-tls-docker.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/enabling-cipher-suites.asciidoc b/docs/reference/security/securing-communications/enabling-cipher-suites.asciidoc similarity index 100% rename 
from x-pack/docs/en/security/securing-communications/enabling-cipher-suites.asciidoc rename to docs/reference/security/securing-communications/enabling-cipher-suites.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/node-certificates.asciidoc b/docs/reference/security/securing-communications/node-certificates.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/node-certificates.asciidoc rename to docs/reference/security/securing-communications/node-certificates.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/securing-elasticsearch.asciidoc b/docs/reference/security/securing-communications/securing-elasticsearch.asciidoc similarity index 84% rename from x-pack/docs/en/security/securing-communications/securing-elasticsearch.asciidoc rename to docs/reference/security/securing-communications/securing-elasticsearch.asciidoc index 09cb118f68466..6b919e065c631 100644 --- a/x-pack/docs/en/security/securing-communications/securing-elasticsearch.asciidoc +++ b/docs/reference/security/securing-communications/securing-elasticsearch.asciidoc @@ -29,17 +29,17 @@ information, see <>. For more information about encrypting communications across the Elastic Stack, see {xpack-ref}/encrypting-communications.html[Encrypting Communications]. -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/node-certificates.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/node-certificates.asciidoc include::node-certificates.asciidoc[] -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/tls-transport.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/tls-transport.asciidoc include::tls-transport.asciidoc[] -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/tls-http.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/tls-http.asciidoc include::tls-http.asciidoc[] -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/tls-ad.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/tls-ad.asciidoc include::tls-ad.asciidoc[] -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/tls-ldap.asciidoc +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/tls-ldap.asciidoc include::tls-ldap.asciidoc[] \ No newline at end of file diff --git a/x-pack/docs/en/security/securing-communications/separating-node-client-traffic.asciidoc b/docs/reference/security/securing-communications/separating-node-client-traffic.asciidoc similarity index 94% rename from x-pack/docs/en/security/securing-communications/separating-node-client-traffic.asciidoc rename to docs/reference/security/securing-communications/separating-node-client-traffic.asciidoc index 887d4701d78e8..e911ad529c418 100644 --- a/x-pack/docs/en/security/securing-communications/separating-node-client-traffic.asciidoc +++ b/docs/reference/security/securing-communications/separating-node-client-traffic.asciidoc @@ -37,7 +37,7 @@ transport.profiles.client.bind_host: 
1.1.1.1 <2> <2> The bind address for the network used for client communication If separate networks are not available, then -{xpack-ref}/ip-filtering.html[IP Filtering] can +{stack-ov}/ip-filtering.html[IP Filtering] can be enabled to limit access to the profiles. When using SSL for transport, a different set of certificates can also be used @@ -65,4 +65,4 @@ transport.profiles.client.xpack.security.ssl.client_authentication: none This setting keeps certificate authentication active for node-to-node traffic, but removes the requirement to distribute a signed certificate to transport clients. For more information, see -{xpack-ref}/java-clients.html#transport-client[Configuring the Transport Client to work with a Secured Cluster]. +{stack-ov}/java-clients.html#transport-client[Configuring the Transport Client to work with a Secured Cluster]. diff --git a/x-pack/docs/en/security/securing-communications/setting-up-ssl.asciidoc b/docs/reference/security/securing-communications/setting-up-ssl.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/setting-up-ssl.asciidoc rename to docs/reference/security/securing-communications/setting-up-ssl.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/tls-ad.asciidoc b/docs/reference/security/securing-communications/tls-ad.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/tls-ad.asciidoc rename to docs/reference/security/securing-communications/tls-ad.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/tls-http.asciidoc b/docs/reference/security/securing-communications/tls-http.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/tls-http.asciidoc rename to docs/reference/security/securing-communications/tls-http.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/tls-ldap.asciidoc b/docs/reference/security/securing-communications/tls-ldap.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/tls-ldap.asciidoc rename to docs/reference/security/securing-communications/tls-ldap.asciidoc diff --git a/x-pack/docs/en/security/securing-communications/tls-transport.asciidoc b/docs/reference/security/securing-communications/tls-transport.asciidoc similarity index 100% rename from x-pack/docs/en/security/securing-communications/tls-transport.asciidoc rename to docs/reference/security/securing-communications/tls-transport.asciidoc diff --git a/docs/reference/sql/endpoints/client-apps/dbeaver.asciidoc b/docs/reference/sql/endpoints/client-apps/dbeaver.asciidoc new file mode 100644 index 0000000000000..ac5ea875e1c5c --- /dev/null +++ b/docs/reference/sql/endpoints/client-apps/dbeaver.asciidoc @@ -0,0 +1,57 @@ +[role="xpack"] +[testenv="platinum"] +[[sql-client-apps-dbeaver]] +=== DBeaver + +[quote, https://dbeaver.io/] +____ +https://dbeaver.io/[DBeaver] DBeaver is free and open source universal database tool for developers and database administrators. +____ + +==== Prerequisites + +* DBeaver version 5.1.4 or higher +* {es-sql} <> + +==== New Connection + +Create a new connection either through the menu *File* > *New* > *Database Connection* menu or directly through the *Database Connection* panel. 
+ +image:images/sql/client-apps/dbeaver-1-new-conn.png[] + +==== Select {es} type +Select the {es} type from the available connection types: + +image:images/sql/client-apps/dbeaver-2-conn-es.png[] + +==== Specify the {es} cluster information + +Configure the {es-sql} connection appropriately: + +image:images/sql/client-apps/dbeaver-3-conn-props.png[] + +==== Verify the driver version + +Make sure the correct JDBC driver version is used via the *Edit Driver Settings* button: + +image:images/sql/client-apps/dbeaver-4-driver-ver.png[] + +DBeaver is aware of the {es} JDBC Maven repository, so simply *Download/Update* the artifact or add a new one. As an alternative, one can add a local file instead if the {es} Maven repository is not an option. + +When changing the driver, make sure to click on the *Find Class* button at the bottom - the Driver class should be picked out automatically; however, this provides a sanity check that the driver jar is properly found and it is not corrupt. + +==== Test connectivity + +Once the driver version and the settings are in place, use *Test Connection* to check that everything works. If things are okay, one should get a confirmation window with the version of the driver and that of {es-sql}: + +image:images/sql/client-apps/dbeaver-5-test-conn.png[] + +Click *Finish* and the new {es} connection appears in the *Database Connection* panel. + +DBeaver is now configured to talk to {es}. + +==== Connect to {es} + +Simply click on the {es} connection and start querying and exploring {es}: + +image:images/sql/client-apps/dbeaver-6-data.png[] \ No newline at end of file diff --git a/docs/reference/sql/endpoints/client-apps/dbvis.asciidoc b/docs/reference/sql/endpoints/client-apps/dbvis.asciidoc new file mode 100644 index 0000000000000..dabee6430fa20 --- /dev/null +++ b/docs/reference/sql/endpoints/client-apps/dbvis.asciidoc @@ -0,0 +1,42 @@ +[role="xpack"] +[testenv="platinum"] +[[sql-client-apps-dbvis]] +=== DbVisualizer + +[quote, http://www.dbvis.com/] +____ +https://www.dbvis.com/[DbVisualizer] is a database management and analysis tool for all major databases. +____ + +==== Prerequisites + +* {es-sql} <> + +==== Add {es} JDBC driver + +Add the {es} JDBC driver to DbVisualizer through *Tools* > *Driver Manager*: + +image:images/sql/client-apps/dbvis-1-driver-manager.png[] + +Create a new driver entry through *Driver* > *Create Driver* and add the JDBC driver in the files panel +through the buttons on the right. Once specified, the driver class and its version should be automatically picked up - one can force the refresh through the *Find driver in listed locations* button, the second from the bottom on the right-hand side: + +image:images/sql/client-apps/dbvis-2-driver.png[] + +==== Create a new connection + +Once the {es} driver is in place, create a new connection: + +image:images/sql/client-apps/dbvis-3-new-conn.png[] + +One can use the wizard or add the settings all at once: + +image:images/sql/client-apps/dbvis-4-conn-props.png[] + +Press *Connect* and the driver version (as well as that of the cluster) should show up under *Connection Message*. + +==== Execute SQL queries + +The setup is done.
DbVisualizer can be used to run queries against {es} and explore its content: + +image:images/sql/client-apps/dbvis-5-data.png[] \ No newline at end of file diff --git a/docs/reference/sql/endpoints/client-apps/index.asciidoc b/docs/reference/sql/endpoints/client-apps/index.asciidoc new file mode 100644 index 0000000000000..ee9891040d0a0 --- /dev/null +++ b/docs/reference/sql/endpoints/client-apps/index.asciidoc @@ -0,0 +1,21 @@ +[role="xpack"] +[testenv="platinum"] +[[sql-client-apps]] +== SQL Client Applications + +Thanks to its <> interface, {es-sql} supports a broad range of applications. +This section lists, in alphabetical order, a number of them and their respective configuration - the list however is by no means comprehensive (feel free to https://www.elastic.co/blog/art-of-pull-request[submit a PR] to improve it): +as long as the app can use the {es-sql} driver, it can use {es-sql}. + +* <> +* <> +* <> +* <> + +NOTE: Each application has its own requirements and license; these are outside the scope of this documentation +which covers only the configuration aspect with {es-sql}. + +include::dbeaver.asciidoc[] +include::dbvis.asciidoc[] +include::squirrel.asciidoc[] +include::workbench.asciidoc[] diff --git a/docs/reference/sql/endpoints/client-apps/squirrel.asciidoc b/docs/reference/sql/endpoints/client-apps/squirrel.asciidoc new file mode 100644 index 0000000000000..c5a30ab15c9ef --- /dev/null +++ b/docs/reference/sql/endpoints/client-apps/squirrel.asciidoc @@ -0,0 +1,50 @@ +[role="xpack"] +[testenv="platinum"] +[[sql-client-apps-squirrel]] +=== SQquirelL SQL + +[quote, http://squirrel-sql.sourceforge.net/] +____ +http://squirrel-sql.sourceforge.net/[SQuirelL SQL] is a graphical, [multi-platform] Java program that will allow you to view the structure of a JDBC compliant database [...]. +____ + +==== Prerequisites + +* {es-sql} <> + +==== Add {es} JDBC Driver + +To add the {es} JDBC driver, use *Windows* > *View Drivers* menu (or Ctrl+Shift+D shortcut): + +image:images/sql/client-apps/squirell-1-view-drivers.png[] + +This opens up the `Drivers` panel on the left. Click on the `+` sign to create a new driver: + +image:images/sql/client-apps/squirell-2-new-driver.png[] + +Select the *Extra Class Path* tab and *Add* the JDBC jar. *List Drivers* to have the `Class Name` filled-in +automatically and name the connection: + +image:images/sql/client-apps/squirell-3-add-driver.png[] + +The driver should now appear in the list: + +image:images/sql/client-apps/squirell-4-driver-list.png[] + +==== Add an alias for {es} + +Add a new connection or in SQuirelL terminology an _alias_ using the new driver. To do so, select the *Aliases* panel on the left and click the `+` sign: + +image:images/sql/client-apps/squirell-5-add-alias.png[] + +Name the new alias and select the `Elasticsearch` driver previously added: + +image:images/sql/client-apps/squirell-6-alias-props.png[] + +The setup is completed. Double check it by clicking on *Test Connection*. + +==== Execute SQL queries + +The connection should open automatically (if it has been created before simply click on *Connect* in the *Alias* panel). 
SQuirelL SQL can now issue SQL commands to {es}: + +image:images/sql/client-apps/squirell-7-data.png[] \ No newline at end of file diff --git a/docs/reference/sql/endpoints/client-apps/workbench.asciidoc b/docs/reference/sql/endpoints/client-apps/workbench.asciidoc new file mode 100644 index 0000000000000..e50a086ab3b63 --- /dev/null +++ b/docs/reference/sql/endpoints/client-apps/workbench.asciidoc @@ -0,0 +1,40 @@ +[role="xpack"] +[testenv="platinum"] +[[sql-client-apps-workbench]] +=== SQL Workbench/J + +[quote, https://www.sql-workbench.eu/] +____ +https://www.sql-workbench.eu/[SQL Workbench/J] is a free, DBMS-independent, cross-platform SQL query tool. +____ + +==== Prerequisites + +* {es-sql} <> + +==== Add {es} JDBC driver + +Add the {es} JDBC driver to SQL Workbench/J through *Manage Drivers* either from the main windows in the *File* menu or from the *Connect* window: + +image:images/sql/client-apps/workbench-1-manage-drivers.png[] + +Add a new entry to the list through the blank page button in the upper left corner. Add the JDBC jar, provide a name and click on the magnifier button to have the driver *Classname* picked-up automatically: + +image:images/sql/client-apps/workbench-2-add-driver.png[] + +==== Create a new connection profile + +With the driver configured, create a new connection profile through *File* > *Connect Window* (or Alt+C shortcut): + +image:images/sql/client-apps/workbench-3-connection.png[] + +Select the previously configured driver and set the URL of your cluster using the JDBC syntax. +Verify the connection through the *Test* button - a confirmation window should appear that everything is properly configured. + +The setup is complete. + +==== Execute SQL queries + +SQL Workbench/J is ready to talk to {es} through SQL: click on the profile created to execute statements or explore the data: + +image:images/sql/client-apps/workbench-4-data.png[] \ No newline at end of file diff --git a/docs/reference/sql/endpoints/index.asciidoc b/docs/reference/sql/endpoints/index.asciidoc index dc4289aef92cc..59c397f97aa46 100644 --- a/docs/reference/sql/endpoints/index.asciidoc +++ b/docs/reference/sql/endpoints/index.asciidoc @@ -2,3 +2,4 @@ include::rest.asciidoc[] include::translate.asciidoc[] include::cli.asciidoc[] include::jdbc.asciidoc[] +include::client-apps/index.asciidoc[] diff --git a/docs/reference/sql/endpoints/jdbc.asciidoc b/docs/reference/sql/endpoints/jdbc.asciidoc index 6a8793f7e24e2..a8a866ac93cb1 100644 --- a/docs/reference/sql/endpoints/jdbc.asciidoc +++ b/docs/reference/sql/endpoints/jdbc.asciidoc @@ -3,14 +3,20 @@ [[sql-jdbc]] == SQL JDBC -Elasticsearch's SQL jdbc driver is a rich, fully featured JDBC driver for Elasticsearch. +{es}'s SQL jdbc driver is a rich, fully featured JDBC driver for {es}. It is Type 4 driver, meaning it is a platform independent, stand-alone, Direct to Database, -pure Java driver that converts JDBC calls to Elasticsearch SQL. +pure Java driver that converts JDBC calls to {es-sql}. +[[sql-jdbc-installation]] [float] === Installation -The JDBC driver can be obtained either by downloading it from the https://www.elastic.co/downloads/jdbc-client[elastic.co] site or by using a http://maven.apache.org/[Maven]-compatible tool with the following dependency: +The JDBC driver can be obtained from: + +Dedicated page:: +https://www.elastic.co/downloads/jdbc-client[elastic.co] provides links, typically for manual downloads. 
+Maven dependency:: +http://maven.apache.org/[Maven]-compatible tools can retrieve it automatically as a dependency: ["source","xml",subs="attributes"] ---- diff --git a/docs/reference/sql/index.asciidoc b/docs/reference/sql/index.asciidoc index 33b9da9fab93d..aa9eebea7b7f6 100644 --- a/docs/reference/sql/index.asciidoc +++ b/docs/reference/sql/index.asciidoc @@ -36,6 +36,8 @@ indices and return results in tabular format. SQL and print tabular results. <>:: A JDBC driver for {es}. +<>:: + Documentation for configuring various SQL/BI tools with {es-sql}. <>:: Overview of the {es-sql} language, such as supported data types, commands and syntax. diff --git a/docs/reference/sql/language/syntax/describe-table.asciidoc b/docs/reference/sql/language/syntax/describe-table.asciidoc index 66c1829c14f99..ebefe9bc34b67 100644 --- a/docs/reference/sql/language/syntax/describe-table.asciidoc +++ b/docs/reference/sql/language/syntax/describe-table.asciidoc @@ -6,9 +6,12 @@ .Synopsis [source, sql] ---- -DESCRIBE [table identifier<1>|[LIKE pattern<2>]] +DESCRIBE [table identifier<1> | [LIKE pattern<2>]] ---- +<1> single table identifier or double quoted es multi index +<2> SQL LIKE pattern + or [source, sql] @@ -16,6 +19,8 @@ or DESC [table identifier<1>|[LIKE pattern<2>]] ---- +<1> single table identifier or double quoted es multi index +<2> SQL LIKE pattern .Description diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index 062016909b651..7bd5cc3a8d2b3 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -111,6 +111,7 @@ protected void doRun() throws Exception { return future; } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/33616") public void testRecoveryWithConcurrentIndexing() throws Exception { final String index = "recovery_with_concurrent_indexing"; Response response = client().performRequest(new Request("GET", "_nodes")); @@ -183,6 +184,7 @@ private String getNodeId(Predicate versionPredicate) throws IOException } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/33616") public void testRelocationWithConcurrentIndexing() throws Exception { final String index = "relocation_with_concurrent_indexing"; switch (CLUSTER_TYPE) { diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 5244cdd726d05..23984e58749f7 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -458,7 +458,7 @@ public final String getRaw(final Settings settings) { * @return the raw string representation of the setting value */ String innerGetRaw(final Settings settings) { - return settings.get(getKey(), defaultValue.apply(settings), isListSetting()); + return settings.get(getKey(), defaultValue.apply(settings)); } /** Logs a deprecation warning if the setting is deprecated and used. 
*/ diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 30cfee81ddd40..99fde0855f94a 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -180,6 +180,7 @@ public void testSimpleUpdate() { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33135") public void testValidateStringSetting() { Settings settings = Settings.builder().putList("foo.bar", Arrays.asList("bla-a", "bla-b")).build(); Setting stringSetting = Setting.simpleString("foo.bar", Property.NodeScope); diff --git a/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java index e5e2453682fc8..4ab278cc02a97 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java @@ -49,10 +49,13 @@ import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.hamcrest.Matchers.any; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Matchers.argThat; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.times; @@ -560,19 +563,19 @@ public void testTimeoutNotificationUsesExecutor() throws InterruptedException { } public void testFailingListenerAfterTimeout() throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); final Logger mockLogger = mock(Logger.class); + doAnswer(invocationOnMock -> { + latch.countDown(); + return null; + }).when(mockLogger).warn(argThat(any(String.class)), argThat(any(RuntimeException.class))); final GlobalCheckpointListeners globalCheckpointListeners = new GlobalCheckpointListeners(shardId, Runnable::run, scheduler, mockLogger); - final CountDownLatch latch = new CountDownLatch(1); final TimeValue timeout = TimeValue.timeValueMillis(randomIntBetween(1, 50)); globalCheckpointListeners.add( NO_OPS_PERFORMED, (g, e) -> { - try { - throw new RuntimeException("failure"); - } finally { - latch.countDown(); - } + throw new RuntimeException("failure"); }, timeout); latch.await(); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index 8fe1daefe6d48..2c659ac60ec4a 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -412,7 +412,7 @@ public void testMaybeRollTranslogGeneration() throws Exception { } } - @TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.index.engine:TRACE") + @TestLogging("org.elasticsearch.index.shard:TRACE,org.elasticsearch.index.engine:TRACE") public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { createIndex("test"); ensureGreen(); diff --git a/x-pack/docs/en/security/configuring-es.asciidoc b/x-pack/docs/en/security/configuring-es.asciidoc index 
5fd9ed610cb3e..7bdfbef08deb5 100644 --- a/x-pack/docs/en/security/configuring-es.asciidoc +++ b/x-pack/docs/en/security/configuring-es.asciidoc @@ -136,10 +136,15 @@ By default, events are logged to a dedicated `elasticsearch-access.log` file in easier analysis and control what events are logged. -- -include::securing-communications/securing-elasticsearch.asciidoc[] -include::securing-communications/configuring-tls-docker.asciidoc[] -include::securing-communications/enabling-cipher-suites.asciidoc[] -include::securing-communications/separating-node-client-traffic.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/securing-elasticsearch.asciidoc +include::{es-repo-dir}/security/securing-communications/securing-elasticsearch.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/configuring-tls-docker.asciidoc +include::{es-repo-dir}/security/securing-communications/configuring-tls-docker.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/enabling-cipher-suites.asciidoc +include::{es-repo-dir}/security/securing-communications/enabling-cipher-suites.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/separating-node-client-traffic.asciidoc +include::{es-repo-dir}/security/securing-communications/separating-node-client-traffic.asciidoc[] +:edit_url: include::authentication/configuring-active-directory-realm.asciidoc[] include::authentication/configuring-file-realm.asciidoc[] include::authentication/configuring-ldap-realm.asciidoc[] @@ -148,6 +153,9 @@ include::authentication/configuring-pki-realm.asciidoc[] include::authentication/configuring-saml-realm.asciidoc[] :edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authentication/configuring-kerberos-realm.asciidoc include::authentication/configuring-kerberos-realm.asciidoc[] +:edit_url: include::fips-140-compliance.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/settings/security-settings.asciidoc include::{es-repo-dir}/settings/security-settings.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/settings/audit-settings.asciidoc include::{es-repo-dir}/settings/audit-settings.asciidoc[] diff --git a/x-pack/docs/en/security/securing-communications.asciidoc b/x-pack/docs/en/security/securing-communications.asciidoc index 11f6b3dc5616e..84f3b0bc27ac6 100644 --- a/x-pack/docs/en/security/securing-communications.asciidoc +++ b/x-pack/docs/en/security/securing-communications.asciidoc @@ -17,10 +17,8 @@ This section shows how to: The authentication of new nodes helps prevent a rogue node from joining the cluster and receiving data through replication. -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications/setting-up-ssl.asciidoc -include::securing-communications/setting-up-ssl.asciidoc[] - -//TO-DO: These sections can be removed when all links to them are removed. 
+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/setting-up-ssl.asciidoc +include::{es-repo-dir}/security/securing-communications/setting-up-ssl.asciidoc[] [[ciphers]] === Enabling cipher suites for stronger encryption diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowIndexRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowIndexRequestTests.java index 2017fa2fdb989..e5f7e693a7f1c 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowIndexRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowIndexRequestTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ccr.action; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; @@ -12,6 +13,10 @@ import java.io.IOException; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + public class FollowIndexRequestTests extends AbstractStreamableXContentTestCase { @Override @@ -39,4 +44,21 @@ static FollowIndexAction.Request createTestRequest() { randomIntBetween(1, Integer.MAX_VALUE), randomNonNegativeLong(), randomIntBetween(1, Integer.MAX_VALUE), randomIntBetween(1, Integer.MAX_VALUE), TimeValue.timeValueMillis(500), TimeValue.timeValueMillis(500)); } + + public void testValidate() { + FollowIndexAction.Request request = new FollowIndexAction.Request("index1", "index2", null, null, null, null, + null, TimeValue.ZERO, null); + ActionRequestValidationException validationException = request.validate(); + assertThat(validationException, notNullValue()); + assertThat(validationException.getMessage(), containsString("[max_retry_delay] must be positive but was [0ms]")); + + request = new FollowIndexAction.Request("index1", "index2", null, null, null, null, null, TimeValue.timeValueMinutes(10), null); + validationException = request.validate(); + assertThat(validationException, notNullValue()); + assertThat(validationException.getMessage(), containsString("[max_retry_delay] must be less than [5m] but was [10m]")); + + request = new FollowIndexAction.Request("index1", "index2", null, null, null, null, null, TimeValue.timeValueMinutes(1), null); + validationException = request.validate(); + assertThat(validationException, nullValue()); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java index 71fd13d0b504f..9c64ea3da764c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java @@ -169,7 +169,7 @@ public static class AutoFollowPattern implements Writeable, ToXContentObject { public static final ParseField MAX_BATCH_SIZE_IN_BYTES = new ParseField("max_batch_size_in_bytes"); public static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); public static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); - public static final ParseField MAX_RETRY_DELAY = new ParseField("retry_timeout"); + public static 
final ParseField MAX_RETRY_DELAY = new ParseField("max_retry_delay"); public static final ParseField IDLE_SHARD_RETRY_DELAY = new ParseField("idle_shard_retry_delay"); @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowIndexAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowIndexAction.java index 75bcd316de5f7..65136b41a29e0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowIndexAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowIndexAction.java @@ -23,6 +23,8 @@ import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; + public final class FollowIndexAction extends Action { public static final FollowIndexAction INSTANCE = new FollowIndexAction(); @@ -33,7 +35,8 @@ public final class FollowIndexAction extends Action { public static final int DEFAULT_MAX_CONCURRENT_READ_BATCHES = 1; public static final int DEFAULT_MAX_CONCURRENT_WRITE_BATCHES = 1; public static final long DEFAULT_MAX_BATCH_SIZE_IN_BYTES = Long.MAX_VALUE; - public static final TimeValue DEFAULT_RETRY_TIMEOUT = new TimeValue(500); + static final TimeValue DEFAULT_MAX_RETRY_DELAY = new TimeValue(500); + static final TimeValue MAX_RETRY_DELAY = TimeValue.timeValueMinutes(5); public static final TimeValue DEFAULT_POLL_TIMEOUT = TimeValue.timeValueMinutes(1); private FollowIndexAction() { @@ -54,7 +57,7 @@ public static class Request extends ActionRequest implements ToXContentObject { private static final ParseField MAX_BATCH_SIZE_IN_BYTES = new ParseField("max_batch_size_in_bytes"); private static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); private static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); - private static final ParseField MAX_RETRY_DELAY = new ParseField("max_retry_delay"); + private static final ParseField MAX_RETRY_DELAY_FIELD = new ParseField("max_retry_delay"); private static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, true, (args, followerIndex) -> { @@ -75,8 +78,8 @@ public static class Request extends ActionRequest implements ToXContentObject { PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_WRITE_BUFFER_SIZE); PARSER.declareField( ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> TimeValue.parseTimeValue(p.text(), MAX_RETRY_DELAY.getPreferredName()), - MAX_RETRY_DELAY, + (p, c) -> TimeValue.parseTimeValue(p.text(), MAX_RETRY_DELAY_FIELD.getPreferredName()), + MAX_RETRY_DELAY_FIELD, ObjectParser.ValueType.STRING); PARSER.declareField( ConstructingObjectParser.optionalConstructorArg(), @@ -202,7 +205,7 @@ public Request( throw new IllegalArgumentException(MAX_WRITE_BUFFER_SIZE.getPreferredName() + " must be larger than 0"); } - final TimeValue actualRetryTimeout = maxRetryDelay == null ? DEFAULT_RETRY_TIMEOUT : maxRetryDelay; + final TimeValue actualRetryTimeout = maxRetryDelay == null ? DEFAULT_MAX_RETRY_DELAY : maxRetryDelay; final TimeValue actualPollTimeout = pollTimeout == null ? 
DEFAULT_POLL_TIMEOUT : pollTimeout; this.leaderIndex = leaderIndex; @@ -222,7 +225,20 @@ public Request() { @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + + if (maxRetryDelay.millis() <= 0) { + String message = "[" + MAX_RETRY_DELAY_FIELD.getPreferredName() + "] must be positive but was [" + + maxRetryDelay.getStringRep() + "]"; + validationException = addValidationError(message, validationException); + } + if (maxRetryDelay.millis() > FollowIndexAction.MAX_RETRY_DELAY.millis()) { + String message = "[" + MAX_RETRY_DELAY_FIELD.getPreferredName() + "] must be less than [" + MAX_RETRY_DELAY + + "] but was [" + maxRetryDelay.getStringRep() + "]"; + validationException = addValidationError(message, validationException); + } + + return validationException; } @Override @@ -264,7 +280,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize); builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); - builder.field(MAX_RETRY_DELAY.getPreferredName(), maxRetryDelay.getStringRep()); + builder.field(MAX_RETRY_DELAY_FIELD.getPreferredName(), maxRetryDelay.getStringRep()); builder.field(POLL_TIMEOUT.getPreferredName(), pollTimeout.getStringRep()); } builder.endObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index dc69795bb4a01..01ebd3f1d81f1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -94,10 +94,25 @@ public static Request fromXContent(XContentParser parser, String remoteClusterAl public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (leaderClusterAlias == null) { - validationException = addValidationError("leaderClusterAlias is missing", validationException); + validationException = addValidationError("[" + LEADER_CLUSTER_ALIAS_FIELD.getPreferredName() + + "] is missing", validationException); } if (leaderIndexPatterns == null || leaderIndexPatterns.isEmpty()) { - validationException = addValidationError("leaderIndexPatterns is missing", validationException); + validationException = addValidationError("[" + LEADER_INDEX_PATTERNS_FIELD.getPreferredName() + + "] is missing", validationException); + } + if (maxRetryDelay != null) { + if (maxRetryDelay.millis() <= 0) { + String message = "[" + AutoFollowPattern.MAX_RETRY_DELAY.getPreferredName() + "] must be positive but was [" + + maxRetryDelay.getStringRep() + "]"; + validationException = addValidationError(message, validationException); + } + if (maxRetryDelay.millis() > FollowIndexAction.MAX_RETRY_DELAY.millis()) { + String message = "[" + AutoFollowPattern.MAX_RETRY_DELAY.getPreferredName() + "] must be less than [" + + FollowIndexAction.MAX_RETRY_DELAY + + "] but was [" + maxRetryDelay.getStringRep() + "]"; + validationException = addValidationError(message, validationException); + } } return validationException; } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternRequestTests.java index f11e1885e80e3..ced49bbae128a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternRequestTests.java @@ -5,12 +5,18 @@ */ package org.elasticsearch.xpack.core.ccr.action; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import java.io.IOException; import java.util.Arrays; +import java.util.Collections; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContentTestCase { @@ -60,4 +66,39 @@ protected PutAutoFollowPatternAction.Request createTestInstance() { } return request; } + + public void testValidate() { + PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + ActionRequestValidationException validationException = request.validate(); + assertThat(validationException, notNullValue()); + assertThat(validationException.getMessage(), containsString("[leader_cluster_alias] is missing")); + + request.setLeaderClusterAlias("_alias"); + validationException = request.validate(); + assertThat(validationException, notNullValue()); + assertThat(validationException.getMessage(), containsString("[leader_index_patterns] is missing")); + + request.setLeaderIndexPatterns(Collections.emptyList()); + validationException = request.validate(); + assertThat(validationException, notNullValue()); + assertThat(validationException.getMessage(), containsString("[leader_index_patterns] is missing")); + + request.setLeaderIndexPatterns(Collections.singletonList("logs-*")); + validationException = request.validate(); + assertThat(validationException, nullValue()); + + request.setMaxRetryDelay(TimeValue.ZERO); + validationException = request.validate(); + assertThat(validationException, notNullValue()); + assertThat(validationException.getMessage(), containsString("[max_retry_delay] must be positive but was [0ms]")); + + request.setMaxRetryDelay(TimeValue.timeValueMinutes(10)); + validationException = request.validate(); + assertThat(validationException, notNullValue()); + assertThat(validationException.getMessage(), containsString("[max_retry_delay] must be less than [5m] but was [10m]")); + + request.setMaxRetryDelay(TimeValue.timeValueMinutes(1)); + validationException = request.validate(); + assertThat(validationException, nullValue()); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java index d80bbd1b342af..e91f75964fca2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java @@ -211,6 +211,7 @@ private DeleteByQueryRequest buildDeleteByQuery(String jobId, List forec QueryBuilder query = 
QueryBuilders.boolQuery().filter(innerBoolQuery); request.setQuery(query); + request.setRefresh(true); return request; } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java index 70b73e5eed00f..ed893410c8809 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java @@ -7,12 +7,16 @@ package org.elasticsearch.xpack.monitoring.collector.ccr; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.exporter.BaseMonitoringDocTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -20,13 +24,17 @@ import java.io.IOException; import java.util.Collections; +import java.util.Map; import java.util.NavigableMap; import java.util.TreeMap; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; @@ -174,4 +182,65 @@ public void testToXContent() throws IOException { + "}")); } + public void testShardFollowNodeTaskStatusFieldsMapped() throws IOException { + final NavigableMap fetchExceptions = + new TreeMap<>(Collections.singletonMap(1L, new ElasticsearchException("shard is sad"))); + final ShardFollowNodeTaskStatus status = new ShardFollowNodeTaskStatus( + "cluster_alias:leader_index", + "follower_index", + 0, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 100, + 10, + 0, + 10, + 100, + 10, + 10, + 0, + 10, + fetchExceptions, + 2); + XContentBuilder builder = jsonBuilder(); + builder.value(status); + Map serializedStatus = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false); + + Map template = + XContentHelper.convertToMap(XContentType.JSON.xContent(), MonitoringTemplateUtils.loadTemplate("es"), false); + Map ccrStatsMapping = (Map) XContentMapValues.extractValue("mappings.doc.properties.ccr_stats.properties", template); + + assertThat(serializedStatus.size(), equalTo(ccrStatsMapping.size())); + for (Map.Entry entry : serializedStatus.entrySet()) { + String fieldName = entry.getKey(); + Map fieldMapping = (Map) ccrStatsMapping.get(fieldName); + assertThat(fieldMapping, notNullValue()); + + Object fieldValue = entry.getValue(); + String fieldType = (String) fieldMapping.get("type"); + if (fieldValue instanceof Long || 
fieldValue instanceof Integer) { + assertThat("expected long field type for field [" + fieldName + "]", fieldType, + anyOf(equalTo("long"), equalTo("integer"))); + } else if (fieldValue instanceof String) { + assertThat("expected keyword field type for field [" + fieldName + "]", fieldType, + anyOf(equalTo("keyword"), equalTo("text"))); + } else { + // Manual test specific object fields and if not just fail: + if (fieldName.equals("fetch_exceptions")) { + assertThat(XContentMapValues.extractValue("properties.from_seq_no.type", fieldMapping), equalTo("long")); + assertThat(XContentMapValues.extractValue("properties.exception.type", fieldMapping), equalTo("text")); + } else { + fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]"); + } + } + } + } + } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java index 5cb63a3376348..d2e24f3edac4a 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java @@ -368,7 +368,7 @@ public boolean isCatalogAtStart() throws SQLException { @Override public String getCatalogSeparator() throws SQLException { - return "."; + return ":"; } @Override diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaDataTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaDataTests.java new file mode 100644 index 0000000000000..cfa6e797260cb --- /dev/null +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaDataTests.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.jdbc.jdbc; + +import org.elasticsearch.test.ESTestCase; + +public class JdbcDatabaseMetaDataTests extends ESTestCase { + + private JdbcDatabaseMetaData md = new JdbcDatabaseMetaData(null); + + public void testSeparators() throws Exception { + assertEquals(":", md.getCatalogSeparator()); + assertEquals("\"", md.getIdentifierQuoteString()); + assertEquals("\\", md.getSearchStringEscape()); + + } +}
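
For reference, a minimal sketch of how a JDBC client would observe the metadata values exercised by the new JdbcDatabaseMetaDataTests, assuming the Elasticsearch SQL JDBC driver is on the classpath; the jdbc:es://localhost:9200 URL, the class name, and the host/port are illustrative assumptions, while the java.sql.DatabaseMetaData calls themselves are standard JDBC.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;

public class CatalogSeparatorProbe {

    public static void main(String[] args) throws Exception {
        // Hypothetical connection string; adjust the host and port for your cluster.
        String url = "jdbc:es://localhost:9200";
        try (Connection connection = DriverManager.getConnection(url)) {
            DatabaseMetaData metaData = connection.getMetaData();
            // With the change above the driver reports ":" here instead of ".",
            // so clients should read the separator from the driver rather than
            // hard-coding a dot when composing qualified names.
            System.out.println("catalog separator: " + metaData.getCatalogSeparator());
            System.out.println("identifier quote:  " + metaData.getIdentifierQuoteString());
            System.out.println("search escape:     " + metaData.getSearchStringEscape());
        }
    }
}

Tools that build fully qualified object names should query getCatalogSeparator() at runtime rather than assuming ".", since the separator now reported (":") presumably lines up with the cluster:index notation used when addressing remote indices.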