listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::closeJob,
+ options,
+ CloseJobResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets the buckets for a Machine Learning Job.
+ *
+ * For additional info
+ * see ML GET buckets documentation
+ *
+ * @param request The request
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ */
+ public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::getBuckets,
+ options,
+ GetBucketsResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets the buckets for a Machine Learning Job, notifies listener once the requested buckets are retrieved.
+ *
+ * For additional info
+ * see ML GET buckets documentation
+ *
+ * @param request The request
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified upon request completion
+ */
+ public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, ActionListener listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::getBuckets,
+ options,
+ GetBucketsResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
+
+ /**
+ * Flushes internally buffered data for the given Machine Learning Job ensuring all data sent to the job has been processed.
+ * This may cause new results to be calculated depending on the contents of the buffer
+ *
+ * Both flush and close operations are similar,
+ * however the flush is more efficient if you are expecting to send more data for analysis.
+ *
+ * When flushing, the job remains open and is available to continue analyzing data.
+ * A close operation additionally prunes and persists the model state to disk and the
+ * job must be opened again before analyzing further data.
+ *
+ *
+ * For additional info
+ * see Flush ML job documentation
+ *
+ * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ */
+ public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::flushJob,
+ options,
+ FlushJobResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Flushes internally buffered data for the given Machine Learning Job asynchronously ensuring all data sent to the job has been processed.
+ * This may cause new results to be calculated depending on the contents of the buffer
+ *
+ * Both flush and close operations are similar,
+ * however the flush is more efficient if you are expecting to send more data for analysis.
+ *
+ * When flushing, the job remains open and is available to continue analyzing data.
+ * A close operation additionally prunes and persists the model state to disk and the
+ * job must be opened again before analyzing further data.
+ *
+ *
+ * For additional info
+ * see Flush ML job documentation
+ *
+ * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified upon request completion
+ */
+ public void flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::flushJob,
+ options,
+ FlushJobResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets usage statistics for one or more Machine Learning jobs
+ *
+ *
+ * For additional info
+ * see Get Job stats docs
+ *
+ * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @return {@link GetJobStatsResponse} response object containing
+ * the {@link JobStats} objects and the number of jobs found
+ * @throws IOException when there is a serialization issue sending the request or receiving the response
+ */
+ public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::getJobStats,
+ options,
+ GetJobStatsResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets usage statistics for one or more Machine Learning jobs, asynchronously.
+ *
+ *
+ * For additional info
+ * see Get Job stats docs
+ *
+ * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion
+ */
+ public void getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, ActionListener listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::getJobStats,
+ options,
+ GetJobStatsResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets the records for a Machine Learning Job.
+ *
+ * For additional info
+ * see ML GET records documentation
+ *
+ * @param request the request
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ */
+ public GetRecordsResponse getRecords(GetRecordsRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::getRecords,
+ options,
+ GetRecordsResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets the records for a Machine Learning Job, notifies listener once the requested records are retrieved.
+ *
+ * For additional info
+ * see ML GET records documentation
+ *
+ * @param request the request
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified upon request completion
+ */
+ public void getRecordsAsync(GetRecordsRequest request, RequestOptions options, ActionListener listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::getRecords,
+ options,
+ GetRecordsResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index 973c0ce126d37..5e74262fa20e5 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -88,6 +88,7 @@
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
@@ -106,16 +107,18 @@
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
+import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest;
+import org.elasticsearch.index.reindex.ReindexRequest;
+import org.elasticsearch.index.reindex.UpdateByQueryRequest;
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
import org.elasticsearch.protocol.xpack.XPackUsageRequest;
import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
-import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
-import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
+import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.SearchTemplateRequest;
@@ -821,6 +824,48 @@ static Request clusterHealth(ClusterHealthRequest healthRequest) {
return request;
}
+ static Request reindex(ReindexRequest reindexRequest) throws IOException {
+ String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
+ Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+ Params params = new Params(request)
+ .withRefresh(reindexRequest.isRefresh())
+ .withTimeout(reindexRequest.getTimeout())
+ .withWaitForActiveShards(reindexRequest.getWaitForActiveShards());
+
+ if (reindexRequest.getScrollTime() != null) {
+ params.putParam("scroll", reindexRequest.getScrollTime());
+ }
+ request.setEntity(createEntity(reindexRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
+ static Request updateByQuery(UpdateByQueryRequest updateByQueryRequest) throws IOException {
+ String endpoint =
+ endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");
+ Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+ Params params = new Params(request)
+ .withRouting(updateByQueryRequest.getRouting())
+ .withPipeline(updateByQueryRequest.getPipeline())
+ .withRefresh(updateByQueryRequest.isRefresh())
+ .withTimeout(updateByQueryRequest.getTimeout())
+ .withWaitForActiveShards(updateByQueryRequest.getWaitForActiveShards())
+ .withIndicesOptions(updateByQueryRequest.indicesOptions());
+ if (updateByQueryRequest.isAbortOnVersionConflict() == false) {
+ params.putParam("conflicts", "proceed");
+ }
+ if (updateByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) {
+ params.putParam("scroll_size", Integer.toString(updateByQueryRequest.getBatchSize()));
+ }
+ if (updateByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) {
+ params.putParam("scroll", updateByQueryRequest.getScrollTime());
+ }
+ if (updateByQueryRequest.getSize() > 0) {
+ params.putParam("size", Integer.toString(updateByQueryRequest.getSize()));
+ }
+ request.setEntity(createEntity(updateByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
static Request rollover(RolloverRequest rolloverRequest) throws IOException {
String endpoint = new EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover")
.addPathPart(rolloverRequest.getNewIndexName()).build();
@@ -1126,6 +1171,13 @@ static Request xPackInfo(XPackInfoRequest infoRequest) {
return request;
}
+ static Request xPackGraphExplore(GraphExploreRequest exploreRequest) throws IOException {
+ String endpoint = endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore");
+ Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+ request.setEntity(createEntity(exploreRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
@@ -1199,31 +1251,6 @@ static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) {
return request;
}
- static Request putMachineLearningJob(PutJobRequest putJobRequest) throws IOException {
- String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
- .addPathPartAsIs("anomaly_detectors")
- .addPathPart(putJobRequest.getJob().getId())
- .build();
- Request request = new Request(HttpPut.METHOD_NAME, endpoint);
- request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE));
- return request;
- }
-
- static Request machineLearningOpenJob(OpenJobRequest openJobRequest) throws IOException {
- String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
- .addPathPartAsIs("anomaly_detectors")
- .addPathPart(openJobRequest.getJobId())
- .addPathPartAsIs("_open")
- .build();
- Request request = new Request(HttpPost.METHOD_NAME, endpoint);
- request.setJsonEntity(openJobRequest.toString());
- return request;
- }
-
static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) {
EndpointBuilder endpointBuilder = new EndpointBuilder()
.addPathPartAsIs("_xpack/migration/assistance")
@@ -1235,7 +1262,7 @@ static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRe
return request;
}
- private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
+ static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
}
@@ -1356,11 +1383,16 @@ Params withRealtime(boolean realtime) {
Params withRefresh(boolean refresh) {
if (refresh) {
- return withRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+ return withRefreshPolicy(RefreshPolicy.IMMEDIATE);
}
return this;
}
+ /**
+ * @deprecated If creating a new HLRC ReST API call, use {@link RefreshPolicy}
+ * instead of {@link WriteRequest.RefreshPolicy} from the server project
+ */
+ @Deprecated
Params withRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) {
if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) {
return putParam("refresh", refreshPolicy.getValue());
@@ -1368,6 +1400,13 @@ Params withRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) {
return this;
}
+ Params withRefreshPolicy(RefreshPolicy refreshPolicy) {
+ if (refreshPolicy != RefreshPolicy.NONE) {
+ return putParam("refresh", refreshPolicy.getValue());
+ }
+ return this;
+ }
+
Params withRetryOnConflict(int retryOnConflict) {
if (retryOnConflict > 0) {
return putParam("retry_on_conflict", String.valueOf(retryOnConflict));
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index e705ca12806ba..6e3c5a6fb831e 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -19,7 +19,6 @@
package org.elasticsearch.client;
-import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
@@ -65,6 +64,9 @@
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.index.rankeval.RankEvalResponse;
+import org.elasticsearch.index.reindex.BulkByScrollResponse;
+import org.elasticsearch.index.reindex.ReindexRequest;
+import org.elasticsearch.index.reindex.UpdateByQueryRequest;
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestStatus;
@@ -177,6 +179,7 @@
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Optional;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.function.Function;
@@ -209,6 +212,7 @@ public class RestHighLevelClient implements Closeable {
private final TasksClient tasksClient = new TasksClient(this);
private final XPackClient xPackClient = new XPackClient(this);
private final WatcherClient watcherClient = new WatcherClient(this);
+ private final GraphClient graphClient = new GraphClient(this);
private final LicenseClient licenseClient = new LicenseClient(this);
private final MigrationClient migrationClient = new MigrationClient(this);
private final MachineLearningClient machineLearningClient = new MachineLearningClient(this);
@@ -324,6 +328,16 @@ public final XPackClient xpack() {
* Watcher APIs on elastic.co for more information.
*/
public WatcherClient watcher() { return watcherClient; }
+
+ /**
+ * Provides methods for accessing the Elastic Licensed Graph explore API that
+ * is shipped with the default distribution of Elasticsearch. All of
+ * these APIs will 404 if run against the OSS distribution of Elasticsearch.
+ *
+ * See the
+ * Graph API on elastic.co for more information.
+ */
+ public GraphClient graph() { return graphClient; }
/**
* Provides methods for accessing the Elastic Licensed Licensing APIs that
@@ -384,6 +398,62 @@ public final void bulkAsync(BulkRequest bulkRequest, RequestOptions options, Act
performRequestAsyncAndParseEntity(bulkRequest, RequestConverters::bulk, options, BulkResponse::fromXContent, listener, emptySet());
}
+ /**
+ * Executes a reindex request.
+ * See Reindex API on elastic.co
+ * @param reindexRequest the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @return the response
+ * @throws IOException in case there is a problem sending the request or parsing back the response
+ */
+ public final BulkByScrollResponse reindex(ReindexRequest reindexRequest, RequestOptions options) throws IOException {
+ return performRequestAndParseEntity(
+ reindexRequest, RequestConverters::reindex, options, BulkByScrollResponse::fromXContent, emptySet()
+ );
+ }
+
+ /**
+ * Asynchronously executes a reindex request.
+ * See Reindex API on elastic.co
+ * @param reindexRequest the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener the listener to be notified upon request completion
+ */
+ public final void reindexAsync(ReindexRequest reindexRequest, RequestOptions options, ActionListener listener) {
+ performRequestAsyncAndParseEntity(
+ reindexRequest, RequestConverters::reindex, options, BulkByScrollResponse::fromXContent, listener, emptySet()
+ );
+ }
+
+ /**
+ * Executes a update by query request.
+ * See
+ * Update By Query API on elastic.co
+ * @param updateByQueryRequest the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @return the response
+ * @throws IOException in case there is a problem sending the request or parsing back the response
+ */
+ public final BulkByScrollResponse updateByQuery(UpdateByQueryRequest updateByQueryRequest, RequestOptions options) throws IOException {
+ return performRequestAndParseEntity(
+ updateByQueryRequest, RequestConverters::updateByQuery, options, BulkByScrollResponse::fromXContent, emptySet()
+ );
+ }
+
+ /**
+ * Asynchronously executes an update by query request.
+ * See
+ * Update By Query API on elastic.co
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener the listener to be notified upon request completion
+ */
+ public final void updateByQueryAsync(UpdateByQueryRequest reindexRequest, RequestOptions options,
+ ActionListener listener) {
+ performRequestAsyncAndParseEntity(
+ reindexRequest, RequestConverters::updateByQuery, options, BulkByScrollResponse::fromXContent, listener, emptySet()
+ );
+ }
+
/**
* Pings the remote Elasticsearch cluster and returns true if the ping succeeded, false otherwise
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@@ -949,6 +1019,11 @@ public final void fieldCapsAsync(FieldCapabilitiesRequest fieldCapabilitiesReque
FieldCapabilitiesResponse::fromXContent, listener, emptySet());
}
+ /**
+ * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
+ * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
+ */
+ @Deprecated
protected final Resp performRequestAndParseEntity(Req request,
CheckedFunction requestConverter,
RequestOptions options,
@@ -958,15 +1033,58 @@ protected final Resp performRequestAndParseEnt
response -> parseEntity(response.getEntity(), entityParser), ignores);
}
+ /**
+ * Defines a helper method for performing a request and then parsing the returned entity using the provided entityParser.
+ */
+ protected final Resp performRequestAndParseEntity(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction entityParser,
+ Set ignores) throws IOException {
+ return performRequest(request, requestConverter, options,
+ response -> parseEntity(response.getEntity(), entityParser), ignores);
+ }
+
+ /**
+ * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
+ * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
+ */
+ @Deprecated
protected final Resp performRequest(Req request,
- CheckedFunction requestConverter,
- RequestOptions options,
- CheckedFunction responseConverter,
- Set ignores) throws IOException {
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ Set ignores) throws IOException {
ActionRequestValidationException validationException = request.validate();
- if (validationException != null) {
+ if (validationException != null && validationException.validationErrors().isEmpty() == false) {
throw validationException;
}
+ return internalPerformRequest(request, requestConverter, options, responseConverter, ignores);
+ }
+
+ /**
+ * Defines a helper method for performing a request.
+ */
+ protected final Resp performRequest(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ Set ignores) throws IOException {
+ Optional validationException = request.validate();
+ if (validationException != null && validationException.isPresent()) {
+ throw validationException.get();
+ }
+ return internalPerformRequest(request, requestConverter, options, responseConverter, ignores);
+ }
+
+ /**
+ * Provides common functionality for performing a request.
+ */
+ private Resp internalPerformRequest(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ Set ignores) throws IOException {
Request req = requestConverter.apply(request);
req.setOptions(options);
Response response;
@@ -994,25 +1112,75 @@ protected final Resp performRequest(Req reques
}
}
+ /**
+ * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
+ * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
+ */
+ @Deprecated
protected final void performRequestAsyncAndParseEntity(Req request,
- CheckedFunction requestConverter,
- RequestOptions options,
- CheckedFunction entityParser,
- ActionListener listener, Set ignores) {
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction entityParser,
+ ActionListener listener, Set ignores) {
+ performRequestAsync(request, requestConverter, options,
+ response -> parseEntity(response.getEntity(), entityParser), listener, ignores);
+ }
+
+ /**
+ * Defines a helper method for asynchronously performing a request.
+ */
+ protected final void performRequestAsyncAndParseEntity(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction entityParser,
+ ActionListener listener, Set ignores) {
performRequestAsync(request, requestConverter, options,
response -> parseEntity(response.getEntity(), entityParser), listener, ignores);
}
+
+ /**
+ * @deprecated If creating a new HLRC ReST API call, consider creating new actions instead of reusing server actions. The Validation
+ * layer has been added to the ReST client, and requests should extend {@link Validatable} instead of {@link ActionRequest}.
+ */
+ @Deprecated
protected final void performRequestAsync(Req request,
- CheckedFunction requestConverter,
- RequestOptions options,
- CheckedFunction responseConverter,
- ActionListener listener, Set ignores) {
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ ActionListener listener, Set ignores) {
ActionRequestValidationException validationException = request.validate();
- if (validationException != null) {
+ if (validationException != null && validationException.validationErrors().isEmpty() == false) {
listener.onFailure(validationException);
return;
}
+ internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores);
+ }
+
+ /**
+ * Defines a helper method for asynchronously performing a request.
+ */
+ protected final void performRequestAsync(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ ActionListener listener, Set ignores) {
+ Optional validationException = request.validate();
+ if (validationException != null && validationException.isPresent()) {
+ listener.onFailure(validationException.get());
+ return;
+ }
+ internalPerformRequestAsync(request, requestConverter, options, responseConverter, listener, ignores);
+ }
+
+ /**
+ * Provides common functionality for asynchronously performing a request.
+ */
+ private void internalPerformRequestAsync(Req request,
+ CheckedFunction requestConverter,
+ RequestOptions options,
+ CheckedFunction responseConverter,
+ ActionListener listener, Set ignores) {
Request req;
try {
req = requestConverter.apply(request);
@@ -1026,6 +1194,7 @@ protected final void performRequestAsync(Req r
client.performRequestAsync(req, responseListener);
}
+
final ResponseListener wrapResponseListener(CheckedFunction responseConverter,
ActionListener actionListener, Set ignores) {
return new ResponseListener() {
@@ -1108,15 +1277,6 @@ protected final Resp parseEntity(final HttpEntity entity,
}
}
- private static RequestOptions optionsForHeaders(Header[] headers) {
- RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
- for (Header header : headers) {
- Objects.requireNonNull(header, "header cannot be null");
- options.addHeader(header.getName(), header.getValue());
- }
- return options.build();
- }
-
static boolean convertExistsResponse(Response response) {
return response.getStatusLine().getStatusCode() == 200;
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/TimedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TimedRequest.java
new file mode 100644
index 0000000000000..af8fbe3e72b37
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/TimedRequest.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import org.elasticsearch.common.unit.TimeValue;
+
+/**
+ * A base request for any requests that supply timeouts.
+ *
+ * Please note, any requests that use an ackTimeout should set timeout as they
+ * represent the same backing field on the server.
+ */
+public class TimedRequest implements Validatable {
+
+ private TimeValue timeout;
+ private TimeValue masterTimeout;
+
+ public void setTimeout(TimeValue timeout) {
+ this.timeout = timeout;
+
+ }
+
+ public void setMasterTimeout(TimeValue masterTimeout) {
+ this.masterTimeout = masterTimeout;
+ }
+
+ /**
+ * Returns the request timeout
+ */
+ public TimeValue timeout() {
+ return timeout;
+ }
+
+ /**
+ * Returns the timeout for the request to be completed on the master node
+ */
+ public TimeValue masterNodeTimeout() {
+ return masterTimeout;
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java
new file mode 100644
index 0000000000000..fe4a1fc42cb3b
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Validatable.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import java.util.Optional;
+
+/**
+ * Defines a validation layer for Requests.
+ */
+public interface Validatable {
+ /**
+ * Perform validation. This method does not have to be overridden in the event that no validation needs to be done,
+ * or the validation was done during object construction time. A {@link ValidationException} that is not null is
+ * assumed to contain validation errors and will be thrown.
+ *
+ * @return An {@link Optional} {@link ValidationException} that contains a list of validation errors.
+ */
+ default Optional validate() {
+ return Optional.empty();
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java
new file mode 100644
index 0000000000000..6b5d738d67565
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Encapsulates an accumulation of validation errors
+ */
+public class ValidationException extends IllegalArgumentException {
+ private final List validationErrors = new ArrayList<>();
+
+ /**
+ * Add a new validation error to the accumulating validation errors
+ * @param error the error to add
+ */
+ public void addValidationError(String error) {
+ validationErrors.add(error);
+ }
+
+ /**
+ * Returns the validation errors accumulated
+ */
+ public final List validationErrors() {
+ return validationErrors;
+ }
+
+ @Override
+ public final String getMessage() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("Validation Failed: ");
+ int index = 0;
+ for (String error : validationErrors) {
+ sb.append(++index).append(": ").append(error).append(";");
+ }
+ return sb.toString();
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java
new file mode 100644
index 0000000000000..1b609797dd6fe
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Abstract class that provides a list of results and their count.
+ */
+public abstract class AbstractResultResponse<T extends ToXContent> extends ActionResponse implements ToXContentObject {
+
+    public static final ParseField COUNT = new ParseField("count");
+
+    private final ParseField resultsField;
+    protected final List<T> results;
+    protected final long count;
+
+    AbstractResultResponse(ParseField resultsField, List<T> results, long count) {
+        this.resultsField = Objects.requireNonNull(resultsField,
+            "[results_field] must not be null");
+        // Defensive view: subclasses and callers cannot mutate the result list.
+        this.results = Collections.unmodifiableList(results);
+        this.count = count;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(COUNT.getPreferredName(), count);
+        builder.field(resultsField.getPreferredName(), results);
+        builder.endObject();
+        return builder;
+    }
+
+    public long count() {
+        return count;
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java
new file mode 100644
index 0000000000000..19f3df8e4320f
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java
@@ -0,0 +1,195 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.security.InvalidParameterException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Request to close Machine Learning Jobs
+ */
+public class CloseJobRequest extends ActionRequest implements ToXContentObject {
+
+    public static final ParseField JOB_ID = new ParseField("job_id");
+    public static final ParseField TIMEOUT = new ParseField("timeout");
+    public static final ParseField FORCE = new ParseField("force");
+    public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");
+
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<CloseJobRequest, Void> PARSER = new ConstructingObjectParser<>(
+        "close_job_request",
+        true, a -> new CloseJobRequest((List<String>) a[0]));
+
+    static {
+        // job_id is sent as a single comma-delimited string; split it back into a list.
+        PARSER.declareField(ConstructingObjectParser.constructorArg(),
+            p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
+            JOB_ID, ObjectParser.ValueType.STRING_ARRAY);
+        PARSER.declareString((obj, val) -> obj.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT);
+        PARSER.declareBoolean(CloseJobRequest::setForce, FORCE);
+        PARSER.declareBoolean(CloseJobRequest::setAllowNoJobs, ALLOW_NO_JOBS);
+    }
+
+    private static final String ALL_JOBS = "_all";
+
+    private final List<String> jobIds;
+    private TimeValue timeout;
+    private Boolean force;
+    private Boolean allowNoJobs;
+
+    /**
+     * Explicitly close all jobs
+     *
+     * @return a {@link CloseJobRequest} for all existing jobs
+     */
+    public static CloseJobRequest closeAllJobsRequest() {
+        return new CloseJobRequest(ALL_JOBS);
+    }
+
+    CloseJobRequest(List<String> jobIds) {
+        if (jobIds.isEmpty()) {
+            throw new InvalidParameterException("jobIds must not be empty");
+        }
+        if (jobIds.stream().anyMatch(Objects::isNull)) {
+            throw new NullPointerException("jobIds must not contain null values");
+        }
+        // Copy so later mutation of the caller's list cannot affect this request.
+        this.jobIds = new ArrayList<>(jobIds);
+    }
+
+    /**
+     * Close the specified Jobs via their unique jobIds
+     *
+     * @param jobIds must be non-null and non-empty and each jobId must be non-null
+     */
+    public CloseJobRequest(String... jobIds) {
+        this(Arrays.asList(jobIds));
+    }
+
+    /**
+     * All the jobIds to be closed
+     */
+    public List<String> getJobIds() {
+        return jobIds;
+    }
+
+    public TimeValue getTimeout() {
+        return timeout;
+    }
+
+    /**
+     * How long to wait for the close request to complete before timing out.
+     *
+     * @param timeout Default value: 30 minutes
+     */
+    public void setTimeout(TimeValue timeout) {
+        this.timeout = timeout;
+    }
+
+    public Boolean isForce() {
+        return force;
+    }
+
+    /**
+     * Should the closing be forced.
+     *
+     * Use to close a failed job, or to forcefully close a job which has not responded to its initial close request.
+     *
+     * @param force When {@code true} forcefully close the job. Defaults to {@code false}
+     */
+    public void setForce(boolean force) {
+        this.force = force;
+    }
+
+    public Boolean isAllowNoJobs() {
+        return this.allowNoJobs;
+    }
+
+    /**
+     * Whether to ignore if a wildcard expression matches no jobs.
+     *
+     * This includes `_all` string or when no jobs have been specified
+     *
+     * @param allowNoJobs When {@code true} ignore if wildcard or `_all` matches no jobs. Defaults to {@code true}
+     */
+    public void setAllowNoJobs(boolean allowNoJobs) {
+        this.allowNoJobs = allowNoJobs;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        // Validation is performed in the constructors; nothing further to check here.
+        return null;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobIds, timeout, force, allowNoJobs);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+
+        if (other == null || getClass() != other.getClass()) {
+            return false;
+        }
+
+        CloseJobRequest that = (CloseJobRequest) other;
+        return Objects.equals(jobIds, that.jobIds) &&
+            Objects.equals(timeout, that.timeout) &&
+            Objects.equals(force, that.force) &&
+            Objects.equals(allowNoJobs, that.allowNoJobs);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(JOB_ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds));
+        if (timeout != null) {
+            builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep());
+        }
+        if (force != null) {
+            builder.field(FORCE.getPreferredName(), force);
+        }
+        if (allowNoJobs != null) {
+            builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java
new file mode 100644
index 0000000000000..2ac1e0faee34f
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Response indicating if the Job(s) closed or not
+ */
+public class CloseJobResponse extends ActionResponse implements ToXContentObject {
+
+    private static final ParseField CLOSED = new ParseField("closed");
+
+    public static final ConstructingObjectParser<CloseJobResponse, Void> PARSER =
+        new ConstructingObjectParser<>("close_job_response", true, (a) -> new CloseJobResponse((Boolean) a[0]));
+
+    static {
+        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), CLOSED);
+    }
+
+    private boolean closed;
+
+    public CloseJobResponse(boolean closed) {
+        this.closed = closed;
+    }
+
+    public static CloseJobResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    /**
+     * Has the job closed or not
+     * @return boolean value indicating the job closed status
+     */
+    public boolean isClosed() {
+        return closed;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+
+        if (other == null || getClass() != other.getClass()) {
+            return false;
+        }
+
+        CloseJobResponse that = (CloseJobResponse) other;
+        return isClosed() == that.isClosed();
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(isClosed());
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(CLOSED.getPreferredName(), closed);
+        builder.endObject();
+        return builder;
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java
new file mode 100644
index 0000000000000..a355f7ec659bb
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+
+import java.util.Objects;
+
+/**
+ * Request to delete a Machine Learning Job via its ID
+ */
+public class DeleteJobRequest extends ActionRequest {
+
+    private String jobId;
+    private boolean force;
+
+    public DeleteJobRequest(String jobId) {
+        this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    /**
+     * The jobId which to delete
+     * @param jobId unique jobId to delete, must not be null
+     */
+    public void setJobId(String jobId) {
+        this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
+    }
+
+    public boolean isForce() {
+        return force;
+    }
+
+    /**
+     * Used to forcefully delete an opened job.
+     * This method is quicker than closing and deleting the job.
+     *
+     * @param force When {@code true} forcefully delete an opened job. Defaults to {@code false}
+     */
+    public void setForce(boolean force) {
+        this.force = force;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        // jobId is null-checked in the constructor and setter; nothing further to validate.
+        return null;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobId, force);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null || obj.getClass() != getClass()) {
+            return false;
+        }
+
+        DeleteJobRequest other = (DeleteJobRequest) obj;
+        // force is a primitive boolean: compare directly instead of autoboxing via Objects.equals.
+        return Objects.equals(jobId, other.jobId) && force == other.force;
+    }
+
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java
new file mode 100644
index 0000000000000..86cafd9e09315
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Response acknowledging the Machine Learning Job request
+ */
+public class DeleteJobResponse extends AcknowledgedResponse {
+
+    public DeleteJobResponse(boolean acknowledged) {
+        super(acknowledged);
+    }
+
+    public DeleteJobResponse() {
+    }
+
+    /**
+     * Parses the {@code acknowledged} flag from the response body and wraps it.
+     */
+    public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException {
+        AcknowledgedResponse delegate = AcknowledgedResponse.fromXContent(parser);
+        return new DeleteJobResponse(delegate.isAcknowledged());
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (other == this) {
+            return true;
+        }
+        if (other instanceof DeleteJobResponse == false || other.getClass() != getClass()) {
+            return false;
+        }
+        DeleteJobResponse response = (DeleteJobResponse) other;
+        return response.isAcknowledged() == isAcknowledged();
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(isAcknowledged());
+    }
+
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java
new file mode 100644
index 0000000000000..067851d452666
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java
@@ -0,0 +1,195 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Request object to flush a given Machine Learning job.
+ */
+public class FlushJobRequest extends ActionRequest implements ToXContentObject {
+
+    public static final ParseField CALC_INTERIM = new ParseField("calc_interim");
+    public static final ParseField START = new ParseField("start");
+    public static final ParseField END = new ParseField("end");
+    public static final ParseField ADVANCE_TIME = new ParseField("advance_time");
+    public static final ParseField SKIP_TIME = new ParseField("skip_time");
+
+    public static final ConstructingObjectParser<FlushJobRequest, Void> PARSER =
+        new ConstructingObjectParser<>("flush_job_request", (a) -> new FlushJobRequest((String) a[0]));
+
+    static {
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
+        PARSER.declareBoolean(FlushJobRequest::setCalcInterim, CALC_INTERIM);
+        PARSER.declareString(FlushJobRequest::setStart, START);
+        PARSER.declareString(FlushJobRequest::setEnd, END);
+        PARSER.declareString(FlushJobRequest::setAdvanceTime, ADVANCE_TIME);
+        PARSER.declareString(FlushJobRequest::setSkipTime, SKIP_TIME);
+    }
+
+    private final String jobId;
+    private Boolean calcInterim;
+    private String start;
+    private String end;
+    private String advanceTime;
+    private String skipTime;
+
+    /**
+     * Create new Flush job request
+     *
+     * @param jobId The job ID of the job to flush
+     */
+    public FlushJobRequest(String jobId) {
+        this.jobId = jobId;
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    public boolean getCalcInterim() {
+        return calcInterim != null && calcInterim; // unset (null) reads as false instead of throwing an unboxing NPE
+    }
+
+    /**
+     * When {@code true} calculates the interim results for the most recent bucket or all buckets within the latency period.
+     *
+     * @param calcInterim defaults to {@code false}.
+     */
+    public void setCalcInterim(boolean calcInterim) {
+        this.calcInterim = calcInterim;
+    }
+
+    public String getStart() {
+        return start;
+    }
+
+    /**
+     * When used in conjunction with {@link FlushJobRequest#calcInterim},
+     * specifies the start of the range of buckets on which to calculate interim results.
+     *
+     * @param start the beginning of the range of buckets; may be an epoch seconds, epoch millis or an ISO string
+     */
+    public void setStart(String start) {
+        this.start = start;
+    }
+
+    public String getEnd() {
+        return end;
+    }
+
+    /**
+     * When used in conjunction with {@link FlushJobRequest#calcInterim}, specifies the end of the range
+     * of buckets on which to calculate interim results
+     *
+     * @param end the end of the range of buckets; may be an epoch seconds, epoch millis or an ISO string
+     */
+    public void setEnd(String end) {
+        this.end = end;
+    }
+
+    public String getAdvanceTime() {
+        return advanceTime;
+    }
+
+    /**
+     * Specifies to advance to a particular time value.
+     * Results are generated and the model is updated for data from the specified time interval.
+     *
+     * @param advanceTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
+     */
+    public void setAdvanceTime(String advanceTime) {
+        this.advanceTime = advanceTime;
+    }
+
+    public String getSkipTime() {
+        return skipTime;
+    }
+
+    /**
+     * Specifies to skip to a particular time value.
+     * Results are not generated and the model is not updated for data from the specified time interval.
+     *
+     * @param skipTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
+     */
+    public void setSkipTime(String skipTime) {
+        this.skipTime = skipTime;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobId, calcInterim, start, end, advanceTime, skipTime);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        FlushJobRequest other = (FlushJobRequest) obj;
+        // calcInterim is a Boolean: use Objects.equals rather than ==, which compares references.
+        return Objects.equals(jobId, other.jobId) &&
+            Objects.equals(calcInterim, other.calcInterim) &&
+            Objects.equals(start, other.start) &&
+            Objects.equals(end, other.end) &&
+            Objects.equals(advanceTime, other.advanceTime) &&
+            Objects.equals(skipTime, other.skipTime);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(Job.ID.getPreferredName(), jobId);
+        if (calcInterim != null) {
+            builder.field(CALC_INTERIM.getPreferredName(), calcInterim);
+        }
+        if (start != null) {
+            builder.field(START.getPreferredName(), start);
+        }
+        if (end != null) {
+            builder.field(END.getPreferredName(), end);
+        }
+        if (advanceTime != null) {
+            builder.field(ADVANCE_TIME.getPreferredName(), advanceTime);
+        }
+        if (skipTime != null) {
+            builder.field(SKIP_TIME.getPreferredName(), skipTime);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        return null;
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java
new file mode 100644
index 0000000000000..048b07b504ae0
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.Objects;
+
+/**
+ * Response object containing flush acknowledgement and additional data
+ */
+public class FlushJobResponse extends ActionResponse implements ToXContentObject {
+
+    public static final ParseField FLUSHED = new ParseField("flushed");
+    public static final ParseField LAST_FINALIZED_BUCKET_END = new ParseField("last_finalized_bucket_end");
+
+    public static final ConstructingObjectParser<FlushJobResponse, Void> PARSER =
+        new ConstructingObjectParser<>("flush_job_response",
+            true,
+            (a) -> {
+                boolean flushed = (boolean) a[0];
+                // last_finalized_bucket_end is optional; a[1] is null when absent.
+                Date date = a[1] == null ? null : new Date((long) a[1]);
+                return new FlushJobResponse(flushed, date);
+            });
+
+    static {
+        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), FLUSHED);
+        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_FINALIZED_BUCKET_END);
+    }
+
+    public static FlushJobResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    private final boolean flushed;
+    private final Date lastFinalizedBucketEnd;
+
+    public FlushJobResponse(boolean flushed, @Nullable Date lastFinalizedBucketEnd) {
+        this.flushed = flushed;
+        this.lastFinalizedBucketEnd = lastFinalizedBucketEnd;
+    }
+
+    /**
+     * Was the job successfully flushed or not
+     */
+    public boolean isFlushed() {
+        return flushed;
+    }
+
+    /**
+     * Provides the timestamp (in milliseconds-since-the-epoch) of the end of the last bucket that was processed.
+     */
+    @Nullable
+    public Date getLastFinalizedBucketEnd() {
+        return lastFinalizedBucketEnd;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(flushed, lastFinalizedBucketEnd);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+
+        if (other == null || getClass() != other.getClass()) {
+            return false;
+        }
+
+        FlushJobResponse that = (FlushJobResponse) other;
+        return that.flushed == flushed && Objects.equals(lastFinalizedBucketEnd, that.lastFinalizedBucketEnd);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(FLUSHED.getPreferredName(), flushed);
+        if (lastFinalizedBucketEnd != null) {
+            builder.timeField(LAST_FINALIZED_BUCKET_END.getPreferredName(),
+                LAST_FINALIZED_BUCKET_END.getPreferredName() + "_string", lastFinalizedBucketEnd.getTime());
+        }
+        builder.endObject();
+        return builder;
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java
new file mode 100644
index 0000000000000..f50d92d58dda5
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java
@@ -0,0 +1,268 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.results.Result;
+import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * A request to retrieve buckets of a given job
+ */
+public class GetBucketsRequest extends ActionRequest implements ToXContentObject {
+
+    public static final ParseField EXPAND = new ParseField("expand");
+    public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
+    public static final ParseField START = new ParseField("start");
+    public static final ParseField END = new ParseField("end");
+    public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
+    public static final ParseField TIMESTAMP = new ParseField("timestamp");
+    public static final ParseField SORT = new ParseField("sort");
+    public static final ParseField DESCENDING = new ParseField("desc");
+
+    // The parser must carry explicit <target, context> type arguments: a raw
+    // ObjectParser combined with the diamond initializer below does not compile.
+    public static final ObjectParser<GetBucketsRequest, Void> PARSER =
+            new ObjectParser<>("get_buckets_request", GetBucketsRequest::new);
+
+    static {
+        PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
+        PARSER.declareString(GetBucketsRequest::setTimestamp, Result.TIMESTAMP);
+        PARSER.declareBoolean(GetBucketsRequest::setExpand, EXPAND);
+        PARSER.declareBoolean(GetBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM);
+        PARSER.declareStringOrNull(GetBucketsRequest::setStart, START);
+        PARSER.declareStringOrNull(GetBucketsRequest::setEnd, END);
+        PARSER.declareObject(GetBucketsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE);
+        PARSER.declareDouble(GetBucketsRequest::setAnomalyScore, ANOMALY_SCORE);
+        PARSER.declareString(GetBucketsRequest::setSort, SORT);
+        PARSER.declareBoolean(GetBucketsRequest::setDescending, DESCENDING);
+    }
+
+    // All optional parameters are tri-state (null == not specified), so they are
+    // stored as boxed types and only serialized when explicitly set.
+    private String jobId;
+    private String timestamp;
+    private Boolean expand;
+    private Boolean excludeInterim;
+    private String start;
+    private String end;
+    private PageParams pageParams;
+    private Double anomalyScore;
+    private String sort;
+    private Boolean descending;
+
+    private GetBucketsRequest() {}
+
+    /**
+     * Constructs a request to retrieve buckets of a given job
+     * @param jobId id of the job to retrieve buckets of
+     */
+    public GetBucketsRequest(String jobId) {
+        this.jobId = Objects.requireNonNull(jobId);
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    /**
+     * Sets the timestamp of a specific bucket to be retrieved.
+     * @param timestamp the timestamp of a specific bucket to be retrieved
+     */
+    public void setTimestamp(String timestamp) {
+        this.timestamp = timestamp;
+    }
+
+    public String getTimestamp() {
+        return timestamp;
+    }
+
+    /**
+     * @return whether buckets are expanded to include their records; {@code false} when unset
+     */
+    public boolean isExpand() {
+        // Unboxing a null Boolean would throw NPE when the parameter was never set,
+        // so "unset" is reported as false.
+        return expand != null && expand;
+    }
+
+    /**
+     * Sets the value of "expand".
+     * When {@code true}, buckets will be expanded to include their records.
+     * @param expand value of "expand" to be set
+     */
+    public void setExpand(boolean expand) {
+        this.expand = expand;
+    }
+
+    /**
+     * @return whether interim buckets are filtered out; {@code false} when unset
+     */
+    public boolean isExcludeInterim() {
+        // See isExpand(): avoid NPE from unboxing an unset Boolean.
+        return excludeInterim != null && excludeInterim;
+    }
+
+    /**
+     * Sets the value of "exclude_interim".
+     * When {@code true}, interim buckets will be filtered out.
+     * @param excludeInterim value of "exclude_interim" to be set
+     */
+    public void setExcludeInterim(boolean excludeInterim) {
+        this.excludeInterim = excludeInterim;
+    }
+
+    public String getStart() {
+        return start;
+    }
+
+    /**
+     * Sets the value of "start" which is a timestamp.
+     * Only buckets whose timestamp is on or after the "start" value will be returned.
+     * @param start value of "start" to be set
+     */
+    public void setStart(String start) {
+        this.start = start;
+    }
+
+    public String getEnd() {
+        return end;
+    }
+
+    /**
+     * Sets the value of "end" which is a timestamp.
+     * Only buckets whose timestamp is before the "end" value will be returned.
+     * @param end value of "end" to be set
+     */
+    public void setEnd(String end) {
+        this.end = end;
+    }
+
+    public PageParams getPageParams() {
+        return pageParams;
+    }
+
+    /**
+     * Sets the paging parameters
+     * @param pageParams the paging parameters
+     */
+    public void setPageParams(PageParams pageParams) {
+        this.pageParams = pageParams;
+    }
+
+    public Double getAnomalyScore() {
+        return anomalyScore;
+    }
+
+    /**
+     * Sets the value of "anomaly_score".
+     * Only buckets with "anomaly_score" equal or greater will be returned.
+     * @param anomalyScore value of "anomaly_score".
+     */
+    public void setAnomalyScore(double anomalyScore) {
+        this.anomalyScore = anomalyScore;
+    }
+
+    public String getSort() {
+        return sort;
+    }
+
+    /**
+     * Sets the value of "sort".
+     * Specifies the bucket field to sort on.
+     * @param sort value of "sort".
+     */
+    public void setSort(String sort) {
+        this.sort = sort;
+    }
+
+    /**
+     * @return whether results are sorted in descending order; {@code false} when unset
+     */
+    public boolean isDescending() {
+        // See isExpand(): avoid NPE from unboxing an unset Boolean.
+        return descending != null && descending;
+    }
+
+    /**
+     * Sets the value of "desc".
+     * Specifies the sorting order.
+     * @param descending value of "desc"
+     */
+    public void setDescending(boolean descending) {
+        this.descending = descending;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        // Every parameter is optional except jobId, which the public constructor enforces.
+        return null;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(Job.ID.getPreferredName(), jobId);
+        // Optional fields are emitted only when explicitly set.
+        if (timestamp != null) {
+            builder.field(Result.TIMESTAMP.getPreferredName(), timestamp);
+        }
+        if (expand != null) {
+            builder.field(EXPAND.getPreferredName(), expand);
+        }
+        if (excludeInterim != null) {
+            builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim);
+        }
+        if (start != null) {
+            builder.field(START.getPreferredName(), start);
+        }
+        if (end != null) {
+            builder.field(END.getPreferredName(), end);
+        }
+        if (pageParams != null) {
+            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
+        }
+        if (anomalyScore != null) {
+            builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
+        }
+        if (sort != null) {
+            builder.field(SORT.getPreferredName(), sort);
+        }
+        if (descending != null) {
+            builder.field(DESCENDING.getPreferredName(), descending);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobId, timestamp, expand, excludeInterim, anomalyScore, pageParams, start, end, sort, descending);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        GetBucketsRequest other = (GetBucketsRequest) obj;
+        return Objects.equals(jobId, other.jobId) &&
+                Objects.equals(timestamp, other.timestamp) &&
+                Objects.equals(expand, other.expand) &&
+                Objects.equals(excludeInterim, other.excludeInterim) &&
+                Objects.equals(anomalyScore, other.anomalyScore) &&
+                Objects.equals(pageParams, other.pageParams) &&
+                Objects.equals(start, other.start) &&
+                Objects.equals(end, other.end) &&
+                Objects.equals(sort, other.sort) &&
+                Objects.equals(descending, other.descending);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java
new file mode 100644
index 0000000000000..de8736b86d925
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.job.results.Bucket;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * A response containing the requested buckets
+ */
+public class GetBucketsResponse extends AbstractResultResponse<Bucket> {
+
+    public static final ParseField BUCKETS = new ParseField("buckets");
+
+    // Type arguments restored: a raw ConstructingObjectParser with a diamond
+    // initializer does not compile, and the cast below needs List<Bucket>.
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<GetBucketsResponse, Void> PARSER =
+            new ConstructingObjectParser<>("get_buckets_response", true,
+                    a -> new GetBucketsResponse((List<Bucket>) a[0], (long) a[1]));
+
+    static {
+        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Bucket.PARSER, BUCKETS);
+        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
+    }
+
+    /**
+     * Parses a {@code GetBucketsResponse} from the REST response body.
+     * @throws IOException if the underlying stream cannot be read
+     */
+    public static GetBucketsResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    GetBucketsResponse(List<Bucket> buckets, long count) {
+        super(BUCKETS, buckets, count);
+    }
+
+    /**
+     * The retrieved buckets
+     * @return the retrieved buckets
+     */
+    public List<Bucket> buckets() {
+        return results;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(count, results);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        // Fast path for identity, then the usual null/class guard.
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        GetBucketsResponse other = (GetBucketsResponse) obj;
+        return count == other.count && Objects.equals(results, other.results);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java
new file mode 100644
index 0000000000000..3de7037e5c8f3
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Request object to get {@link Job} objects with the matching {@code jobId}s or
+ * {@code groupName}s.
+ *
+ * {@code _all} explicitly gets all the jobs in the cluster
+ * An empty request (no {@code jobId}s) implicitly gets all the jobs in the cluster
+ */
+public class GetJobRequest extends ActionRequest implements ToXContentObject {
+
+    public static final ParseField JOB_IDS = new ParseField("job_ids");
+    public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");
+
+    private static final String ALL_JOBS = "_all";
+    private final List<String> jobIds;
+    private Boolean allowNoJobs;
+
+    // Type arguments restored: a raw parser with a diamond initializer does not compile.
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<GetJobRequest, Void> PARSER = new ConstructingObjectParser<>(
+            "get_job_request",
+            true, a -> new GetJobRequest(a[0] == null ? new ArrayList<>() : (List<String>) a[0]));
+
+    static {
+        PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), JOB_IDS);
+        PARSER.declareBoolean(GetJobRequest::setAllowNoJobs, ALLOW_NO_JOBS);
+    }
+
+    /**
+     * Helper method to create a query that will get ALL jobs
+     * @return new {@link GetJobRequest} object searching for the jobId "_all"
+     */
+    public static GetJobRequest getAllJobsRequest() {
+        return new GetJobRequest(ALL_JOBS);
+    }
+
+    /**
+     * Get the specified {@link Job} configurations via their unique jobIds
+     * @param jobIds must not contain any null values
+     */
+    public GetJobRequest(String... jobIds) {
+        this(Arrays.asList(jobIds));
+    }
+
+    GetJobRequest(List<String> jobIds) {
+        if (jobIds.stream().anyMatch(Objects::isNull)) {
+            throw new NullPointerException("jobIds must not contain null values");
+        }
+        // Defensive copy so later mutation of the caller's list cannot affect this request.
+        this.jobIds = new ArrayList<>(jobIds);
+    }
+
+    /**
+     * All the jobIds for which to get configuration information
+     */
+    public List<String> getJobIds() {
+        return jobIds;
+    }
+
+    /**
+     * Whether to ignore if a wildcard expression matches no jobs.
+     *
+     * @param allowNoJobs If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) does not match any jobs
+     */
+    public void setAllowNoJobs(boolean allowNoJobs) {
+        this.allowNoJobs = allowNoJobs;
+    }
+
+    public Boolean isAllowNoJobs() {
+        return allowNoJobs;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        // An empty request is valid: it implicitly means "all jobs".
+        return null;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobIds, allowNoJobs);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+        if (other == null || other.getClass() != getClass()) {
+            return false;
+        }
+        GetJobRequest that = (GetJobRequest) other;
+        return Objects.equals(jobIds, that.jobIds) &&
+                Objects.equals(allowNoJobs, that.allowNoJobs);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        if (jobIds.isEmpty() == false) {
+            builder.field(JOB_IDS.getPreferredName(), jobIds);
+        }
+        if (allowNoJobs != null) {
+            builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
+        }
+        builder.endObject();
+        return builder;
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java
new file mode 100644
index 0000000000000..0cdf08c6c24a4
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * Contains a {@link List} of the found {@link Job} objects and the total count found
+ */
+public class GetJobResponse extends AbstractResultResponse<Job> {
+
+    public static final ParseField RESULTS_FIELD = new ParseField("jobs");
+
+    // Type arguments restored: a raw parser with a diamond initializer does not compile.
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<GetJobResponse, Void> PARSER =
+            new ConstructingObjectParser<>("jobs_response", true,
+                    a -> new GetJobResponse((List<Job.Builder>) a[0], (long) a[1]));
+
+    static {
+        PARSER.declareObjectArray(constructorArg(), Job.PARSER, RESULTS_FIELD);
+        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
+    }
+
+    GetJobResponse(List<Job.Builder> jobBuilders, long count) {
+        // The wire format parses into Job.Builder objects; materialize them here.
+        super(RESULTS_FIELD, jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()), count);
+    }
+
+    /**
+     * The collection of {@link Job} objects found in the query
+     */
+    public List<Job> jobs() {
+        return results;
+    }
+
+    public static GetJobResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(results, count);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        GetJobResponse other = (GetJobResponse) obj;
+        return Objects.equals(results, other.results) && count == other.count;
+    }
+
+    @Override
+    public final String toString() {
+        return Strings.toString(this);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java
new file mode 100644
index 0000000000000..d8eb350755dcb
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+
+/**
+ * Request object to get {@link org.elasticsearch.client.ml.job.stats.JobStats} by their respective jobIds
+ *
+ * {@code _all} explicitly gets all the jobs' statistics in the cluster
+ * An empty request (no {@code jobId}s) implicitly gets all the jobs' statistics in the cluster
+ */
+public class GetJobStatsRequest extends ActionRequest implements ToXContentObject {
+
+    public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");
+
+    // Type arguments restored: a raw parser with a diamond initializer does not compile.
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<GetJobStatsRequest, Void> PARSER = new ConstructingObjectParser<>(
+            "get_jobs_stats_request", a -> new GetJobStatsRequest((List<String>) a[0]));
+
+    static {
+        // The job ids arrive either as an array or as a comma-delimited string.
+        PARSER.declareField(ConstructingObjectParser.constructorArg(),
+                p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
+                Job.ID, ObjectParser.ValueType.STRING_ARRAY);
+        PARSER.declareBoolean(GetJobStatsRequest::setAllowNoJobs, ALLOW_NO_JOBS);
+    }
+
+    private static final String ALL_JOBS = "_all";
+
+    private final List<String> jobIds;
+    private Boolean allowNoJobs;
+
+    /**
+     * Explicitly gets all jobs statistics
+     *
+     * @return a {@link GetJobStatsRequest} for all existing jobs
+     */
+    public static GetJobStatsRequest getAllJobStatsRequest() {
+        return new GetJobStatsRequest(ALL_JOBS);
+    }
+
+    GetJobStatsRequest(List<String> jobIds) {
+        if (jobIds.stream().anyMatch(Objects::isNull)) {
+            throw new NullPointerException("jobIds must not contain null values");
+        }
+        // Defensive copy so later mutation of the caller's list cannot affect this request.
+        this.jobIds = new ArrayList<>(jobIds);
+    }
+
+    /**
+     * Get the specified Job's statistics via their unique jobIds
+     *
+     * @param jobIds must be non-null and each jobId must be non-null
+     */
+    public GetJobStatsRequest(String... jobIds) {
+        this(Arrays.asList(jobIds));
+    }
+
+    /**
+     * All the jobIds for which to get statistics
+     */
+    public List<String> getJobIds() {
+        return jobIds;
+    }
+
+    public Boolean isAllowNoJobs() {
+        return this.allowNoJobs;
+    }
+
+    /**
+     * Whether to ignore if a wildcard expression matches no jobs.
+     *
+     * This includes {@code _all} string or when no jobs have been specified
+     *
+     * @param allowNoJobs When {@code true} ignore if wildcard or {@code _all} matches no jobs. Defaults to {@code true}
+     */
+    public void setAllowNoJobs(boolean allowNoJobs) {
+        this.allowNoJobs = allowNoJobs;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobIds, allowNoJobs);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+        if (other == null || getClass() != other.getClass()) {
+            return false;
+        }
+        GetJobStatsRequest that = (GetJobStatsRequest) other;
+        return Objects.equals(jobIds, that.jobIds) &&
+                Objects.equals(allowNoJobs, that.allowNoJobs);
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        // An empty request is valid: it implicitly means "all jobs".
+        return null;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds));
+        if (allowNoJobs != null) {
+            builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
+        }
+        builder.endObject();
+        return builder;
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java
new file mode 100644
index 0000000000000..2e3ba113d193c
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.client.ml.job.stats.JobStats;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * Contains a {@link List} of the found {@link JobStats} objects and the total count found
+ */
+public class GetJobStatsResponse extends AbstractResultResponse<JobStats> {
+
+    public static final ParseField RESULTS_FIELD = new ParseField("jobs");
+
+    // Type arguments restored: a raw parser with a diamond initializer does not compile.
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<GetJobStatsResponse, Void> PARSER =
+            new ConstructingObjectParser<>("jobs_stats_response", true,
+                    a -> new GetJobStatsResponse((List<JobStats>) a[0], (long) a[1]));
+
+    static {
+        PARSER.declareObjectArray(constructorArg(), JobStats.PARSER, RESULTS_FIELD);
+        PARSER.declareLong(constructorArg(), COUNT);
+    }
+
+    GetJobStatsResponse(List<JobStats> jobStats, long count) {
+        super(RESULTS_FIELD, jobStats, count);
+    }
+
+    /**
+     * The collection of {@link JobStats} objects found in the query
+     */
+    public List<JobStats> jobStats() {
+        return results;
+    }
+
+    public static GetJobStatsResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(results, count);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        GetJobStatsResponse other = (GetJobStatsResponse) obj;
+        return Objects.equals(results, other.results) && count == other.count;
+    }
+
+    @Override
+    public final String toString() {
+        return Strings.toString(this);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java
new file mode 100644
index 0000000000000..0a701f5a1433a
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java
@@ -0,0 +1,222 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.Validatable;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * A request to retrieve records of a given job
+ */
+public class GetRecordsRequest implements ToXContentObject, Validatable {
+
+    public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
+    public static final ParseField START = new ParseField("start");
+    public static final ParseField END = new ParseField("end");
+    public static final ParseField RECORD_SCORE = new ParseField("record_score");
+    public static final ParseField SORT = new ParseField("sort");
+    public static final ParseField DESCENDING = new ParseField("desc");
+
+    // Type arguments restored (a raw parser with a diamond initializer does not compile)
+    // and the parser name fixed: it was "get_buckets_request", a copy-paste from
+    // GetBucketsRequest, which would mislabel parse errors for this request.
+    public static final ObjectParser<GetRecordsRequest, Void> PARSER =
+            new ObjectParser<>("get_records_request", GetRecordsRequest::new);
+
+    static {
+        PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
+        PARSER.declareBoolean(GetRecordsRequest::setExcludeInterim, EXCLUDE_INTERIM);
+        PARSER.declareStringOrNull(GetRecordsRequest::setStart, START);
+        PARSER.declareStringOrNull(GetRecordsRequest::setEnd, END);
+        PARSER.declareObject(GetRecordsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE);
+        PARSER.declareDouble(GetRecordsRequest::setRecordScore, RECORD_SCORE);
+        PARSER.declareString(GetRecordsRequest::setSort, SORT);
+        PARSER.declareBoolean(GetRecordsRequest::setDescending, DESCENDING);
+    }
+
+    // All optional parameters are tri-state (null == not specified), so they are
+    // stored as boxed types and only serialized when explicitly set.
+    private String jobId;
+    private Boolean excludeInterim;
+    private String start;
+    private String end;
+    private PageParams pageParams;
+    private Double recordScore;
+    private String sort;
+    private Boolean descending;
+
+    private GetRecordsRequest() {}
+
+    /**
+     * Constructs a request to retrieve records of a given job
+     * @param jobId id of the job to retrieve records of
+     */
+    public GetRecordsRequest(String jobId) {
+        this.jobId = Objects.requireNonNull(jobId);
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    /**
+     * @return whether interim records are filtered out; {@code false} when unset
+     */
+    public boolean isExcludeInterim() {
+        // Unboxing a null Boolean would throw NPE when the parameter was never set,
+        // so "unset" is reported as false.
+        return excludeInterim != null && excludeInterim;
+    }
+
+    /**
+     * Sets the value of "exclude_interim".
+     * When {@code true}, interim records will be filtered out.
+     * @param excludeInterim value of "exclude_interim" to be set
+     */
+    public void setExcludeInterim(boolean excludeInterim) {
+        this.excludeInterim = excludeInterim;
+    }
+
+    public String getStart() {
+        return start;
+    }
+
+    /**
+     * Sets the value of "start" which is a timestamp.
+     * Only records whose timestamp is on or after the "start" value will be returned.
+     * @param start value of "start" to be set
+     */
+    public void setStart(String start) {
+        this.start = start;
+    }
+
+    public String getEnd() {
+        return end;
+    }
+
+    /**
+     * Sets the value of "end" which is a timestamp.
+     * Only records whose timestamp is before the "end" value will be returned.
+     * @param end value of "end" to be set
+     */
+    public void setEnd(String end) {
+        this.end = end;
+    }
+
+    public PageParams getPageParams() {
+        return pageParams;
+    }
+
+    /**
+     * Sets the paging parameters
+     * @param pageParams The paging parameters
+     */
+    public void setPageParams(PageParams pageParams) {
+        this.pageParams = pageParams;
+    }
+
+    public Double getRecordScore() {
+        return recordScore;
+    }
+
+    /**
+     * Sets the value of "record_score".
+     * Only records with "record_score" equal or greater will be returned.
+     * @param recordScore value of "record_score".
+     */
+    public void setRecordScore(double recordScore) {
+        this.recordScore = recordScore;
+    }
+
+    public String getSort() {
+        return sort;
+    }
+
+    /**
+     * Sets the value of "sort".
+     * Specifies the record field to sort on.
+     * @param sort value of "sort".
+     */
+    public void setSort(String sort) {
+        this.sort = sort;
+    }
+
+    /**
+     * @return whether results are sorted in descending order; {@code false} when unset
+     */
+    public boolean isDescending() {
+        // See isExcludeInterim(): avoid NPE from unboxing an unset Boolean.
+        return descending != null && descending;
+    }
+
+    /**
+     * Sets the value of "desc".
+     * Specifies the sorting order.
+     * @param descending value of "desc"
+     */
+    public void setDescending(boolean descending) {
+        this.descending = descending;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(Job.ID.getPreferredName(), jobId);
+        // Optional fields are emitted only when explicitly set.
+        if (excludeInterim != null) {
+            builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim);
+        }
+        if (start != null) {
+            builder.field(START.getPreferredName(), start);
+        }
+        if (end != null) {
+            builder.field(END.getPreferredName(), end);
+        }
+        if (pageParams != null) {
+            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
+        }
+        if (recordScore != null) {
+            builder.field(RECORD_SCORE.getPreferredName(), recordScore);
+        }
+        if (sort != null) {
+            builder.field(SORT.getPreferredName(), sort);
+        }
+        if (descending != null) {
+            builder.field(DESCENDING.getPreferredName(), descending);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobId, excludeInterim, recordScore, pageParams, start, end, sort, descending);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        GetRecordsRequest other = (GetRecordsRequest) obj;
+        return Objects.equals(jobId, other.jobId) &&
+                Objects.equals(excludeInterim, other.excludeInterim) &&
+                Objects.equals(recordScore, other.recordScore) &&
+                Objects.equals(pageParams, other.pageParams) &&
+                Objects.equals(start, other.start) &&
+                Objects.equals(end, other.end) &&
+                Objects.equals(sort, other.sort) &&
+                Objects.equals(descending, other.descending);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java
new file mode 100644
index 0000000000000..99e1152422609
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.job.results.AnomalyRecord;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * A response containing the requested records
+ */
+public class GetRecordsResponse extends AbstractResultResponse<AnomalyRecord> {
+
+ public static final ParseField RECORDS = new ParseField("records");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<GetRecordsResponse, Void> PARSER = new ConstructingObjectParser<>("get_records_response",
+ true, a -> new GetRecordsResponse((List<AnomalyRecord>) a[0], (long) a[1]));
+
+ static {
+ PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARSER, RECORDS);
+ PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
+ }
+
+ public static GetRecordsResponse fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
+
+ GetRecordsResponse(List<AnomalyRecord> records, long count) {
+ super(RECORDS, records, count);
+ }
+
+ /**
+ * The retrieved records
+ * @return the retrieved records
+ */
+ public List<AnomalyRecord> records() {
+ return results;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(count, results);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ GetRecordsResponse other = (GetRecordsResponse) obj;
+ return count == other.count && Objects.equals(results, other.results);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java
new file mode 100644
index 0000000000000..892df340abd6b
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A POJO class containing an Elasticsearch node's attributes
+ */
+public class NodeAttributes implements ToXContentObject {
+
+ public static final ParseField ID = new ParseField("id");
+ public static final ParseField NAME = new ParseField("name");
+ public static final ParseField EPHEMERAL_ID = new ParseField("ephemeral_id");
+ public static final ParseField TRANSPORT_ADDRESS = new ParseField("transport_address");
+ public static final ParseField ATTRIBUTES = new ParseField("attributes");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<NodeAttributes, Void> PARSER =
+ new ConstructingObjectParser<>("node", true,
+ (a) -> {
+ int i = 0;
+ String id = (String) a[i++];
+ String name = (String) a[i++];
+ String ephemeralId = (String) a[i++];
+ String transportAddress = (String) a[i++];
+ Map<String, String> attributes = (Map<String, String>) a[i];
+ return new NodeAttributes(id, name, ephemeralId, transportAddress, attributes);
+ });
+
+ static {
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), EPHEMERAL_ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), TRANSPORT_ADDRESS);
+ PARSER.declareField(ConstructingObjectParser.constructorArg(),
+ (p, c) -> p.mapStrings(),
+ ATTRIBUTES,
+ ObjectParser.ValueType.OBJECT);
+ }
+
+ private final String id;
+ private final String name;
+ private final String ephemeralId;
+ private final String transportAddress;
+ private final Map<String, String> attributes;
+
+ public NodeAttributes(String id, String name, String ephemeralId, String transportAddress, Map<String, String> attributes) {
+ this.id = id;
+ this.name = name;
+ this.ephemeralId = ephemeralId;
+ this.transportAddress = transportAddress;
+ this.attributes = Collections.unmodifiableMap(attributes);
+ }
+
+ /**
+ * The unique identifier of the node.
+ */
+ public String getId() {
+ return id;
+ }
+
+ /**
+ * The node name.
+ */
+ public String getName() {
+ return name;
+ }
+
+ /**
+ * The ephemeral id of the node.
+ */
+ public String getEphemeralId() {
+ return ephemeralId;
+ }
+
+ /**
+ * The host and port where transport HTTP connections are accepted.
+ */
+ public String getTransportAddress() {
+ return transportAddress;
+ }
+
+ /**
+ * Additional attributes related to this node e.g., {"ml.max_open_jobs": "10"}.
+ */
+ public Map<String, String> getAttributes() {
+ return attributes;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(ID.getPreferredName(), id);
+ builder.field(NAME.getPreferredName(), name);
+ builder.field(EPHEMERAL_ID.getPreferredName(), ephemeralId);
+ builder.field(TRANSPORT_ADDRESS.getPreferredName(), transportAddress);
+ builder.field(ATTRIBUTES.getPreferredName(), attributes);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, name, ephemeralId, transportAddress, attributes);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+
+ NodeAttributes that = (NodeAttributes) other;
+ return Objects.equals(id, that.id) &&
+ Objects.equals(name, that.name) &&
+ Objects.equals(ephemeralId, that.ephemeralId) &&
+ Objects.equals(transportAddress, that.transportAddress) &&
+ Objects.equals(attributes, that.attributes);
+ }
+}
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/OpenJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java
similarity index 85%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/OpenJobRequest.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java
index a18a18bb55a14..5b8e68cd72dc3 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/OpenJobRequest.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java
@@ -16,23 +16,25 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml;
+package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
-import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.Objects;
+/**
+ * Request to open a Machine Learning Job
+ */
public class OpenJobRequest extends ActionRequest implements ToXContentObject {
public static final ParseField TIMEOUT = new ParseField("timeout");
@@ -51,6 +53,11 @@ public static OpenJobRequest fromXContent(XContentParser parser) throws IOExcept
private String jobId;
private TimeValue timeout;
+ /**
+ * Create a new request with the desired jobId
+ *
+ * @param jobId unique jobId, must not be null
+ */
public OpenJobRequest(String jobId) {
this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
}
@@ -59,6 +66,11 @@ public String getJobId() {
return jobId;
}
+ /**
+ * The jobId to open
+ *
+ * @param jobId unique jobId, must not be null
+ */
public void setJobId(String jobId) {
this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
}
@@ -67,6 +79,11 @@ public TimeValue getTimeout() {
return timeout;
}
+ /**
+ * How long to wait for job to open before timing out the request
+ *
+ * @param timeout default value of 30 minutes
+ */
public void setTimeout(TimeValue timeout) {
this.timeout = timeout;
}
@@ -77,7 +94,7 @@ public ActionRequestValidationException validate() {
}
@Override
- public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(Job.ID.getPreferredName(), jobId);
if (timeout != null) {
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/OpenJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java
similarity index 78%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/OpenJobResponse.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java
index d8850ddbbe3a8..2536aeeaf78bb 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/OpenJobResponse.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java
@@ -16,11 +16,11 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml;
+package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -28,22 +28,23 @@
import java.io.IOException;
import java.util.Objects;
+/**
+ * Response indicating if the Machine Learning Job is now opened or not
+ */
public class OpenJobResponse extends ActionResponse implements ToXContentObject {
private static final ParseField OPENED = new ParseField("opened");
- public static final ObjectParser<OpenJobResponse, Void> PARSER = new ObjectParser<>("open_job_response", true, OpenJobResponse::new);
+ public static final ConstructingObjectParser<OpenJobResponse, Void> PARSER =
+ new ConstructingObjectParser<>("open_job_response", true, (a) -> new OpenJobResponse((Boolean)a[0]));
static {
- PARSER.declareBoolean(OpenJobResponse::setOpened, OPENED);
+ PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), OPENED);
}
private boolean opened;
- OpenJobResponse() {
- }
-
- public OpenJobResponse(boolean opened) {
+ OpenJobResponse(boolean opened) {
this.opened = opened;
}
@@ -51,14 +52,15 @@ public static OpenJobResponse fromXContent(XContentParser parser) throws IOExcep
return PARSER.parse(parser, null);
}
+ /**
+ * Has the job opened or not
+ *
+ * @return boolean value indicating the job opened status
+ */
public boolean isOpened() {
return opened;
}
- public void setOpened(boolean opened) {
- this.opened = opened;
- }
-
@Override
public boolean equals(Object other) {
if (this == other) {
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java
similarity index 87%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobRequest.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java
index 2cdf1993fccd3..de8529de6bb8a 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobRequest.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java
@@ -16,22 +16,30 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml;
+package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.Objects;
+/**
+ * Request to create a new Machine Learning Job given a {@link Job} configuration
+ */
public class PutJobRequest extends ActionRequest implements ToXContentObject {
private final Job job;
+ /**
+ * Construct a new PutJobRequest
+ *
+ * @param job a {@link Job} configuration to create
+ */
public PutJobRequest(Job job) {
this.job = job;
}
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java
similarity index 84%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java
index b37bd35d6b17f..6e6cce52e58c2 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java
@@ -16,17 +16,19 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml;
+package org.elasticsearch.client.ml;
-import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.Objects;
+/**
+ * Response containing the newly created {@link Job}
+ */
public class PutJobResponse implements ToXContentObject {
private Job job;
@@ -35,19 +37,16 @@ public static PutJobResponse fromXContent(XContentParser parser) throws IOExcept
return new PutJobResponse(Job.PARSER.parse(parser, null).build());
}
- public PutJobResponse(Job job) {
+ PutJobResponse(Job job) {
this.job = job;
}
- public PutJobResponse() {
- }
-
public Job getResponse() {
return job;
}
@Override
- public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
job.toXContent(builder, params);
return builder;
}
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/ChunkingConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/ChunkingConfig.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java
index 0b9d9f1204614..10e7b3f974941 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/ChunkingConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.datafeed;
+package org.elasticsearch.client.ml.datafeed;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java
similarity index 99%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedConfig.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java
index 929d4dacb90fa..752752b103885 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java
@@ -16,8 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.datafeed;
+package org.elasticsearch.client.ml.datafeed;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@@ -27,7 +28,6 @@
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java
similarity index 99%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedUpdate.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java
index 787bdf06e5ec2..184d5d51481fa 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedUpdate.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java
@@ -16,8 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.datafeed;
+package org.elasticsearch.client.ml.datafeed;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@@ -26,7 +27,6 @@
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java
similarity index 99%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfig.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java
index 00fa1bdd47fed..9b759599dda3c 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
@@ -300,6 +300,10 @@ public int hashCode() {
multivariateByFields);
}
+ public static Builder builder(List<Detector> detectors) {
+ return new Builder(detectors);
+ }
+
public static class Builder {
private List detectors;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisLimits.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisLimits.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java
index f69b9ccbf9ff4..22d26f06fd8ce 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisLimits.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/CategorizationAnalyzerConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java
similarity index 99%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/CategorizationAnalyzerConfig.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java
index dc7f047b80404..3a2243d6548fd 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/CategorizationAnalyzerConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DataDescription.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java
similarity index 99%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DataDescription.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java
index a3f8c2563b2d8..636b8c6ad5014 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DataDescription.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DefaultDetectorDescription.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DefaultDetectorDescription.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java
index 081e685fc741b..25b4fbbb2a7ee 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DefaultDetectorDescription.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.Strings;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DetectionRule.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DetectionRule.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java
index 9a73afe885b1c..bcba8a7d74a61 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DetectionRule.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java
similarity index 99%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java
index 3274b03877f14..e1af60269b52b 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
@@ -265,6 +265,10 @@ public int hashCode() {
excludeFrequent, rules, detectorIndex);
}
+ public static Builder builder() {
+ return new Builder();
+ }
+
public static class Builder {
private String detectorDescription;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DetectorFunction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java
similarity index 97%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DetectorFunction.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java
index 5d9a06948d0fb..932782101ba7c 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DetectorFunction.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import java.util.Arrays;
import java.util.Collections;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/FilterRef.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/FilterRef.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java
index 9afbdf4876fd8..b686ad92ae533 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/FilterRef.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Job.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java
similarity index 99%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Job.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java
index 6bc1be3b56384..aff74271f1c0b 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Job.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java
@@ -16,8 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
+import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
@@ -25,7 +26,6 @@
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil;
import java.io.IOException;
import java.util.Collections;
@@ -412,6 +412,10 @@ public final String toString() {
return Strings.toString(this);
}
+ public static Builder builder(String id) {
+ return new Builder(id);
+ }
+
public static class Builder {
private String id;
@@ -435,7 +439,7 @@ public static class Builder {
private String resultsIndexName;
private boolean deleted;
- public Builder() {
+ private Builder() {
}
public Builder(String id) {
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java
new file mode 100644
index 0000000000000..32684bd7e62b4
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.config;
+
+import java.util.Locale;
+
+/**
+ * Jobs whether running or complete are in one of these states.
+ * When a job is created it is initialised in the state closed
+ * i.e. it is not running.
+ */
+public enum JobState {
+
+ CLOSING, CLOSED, OPENED, FAILED, OPENING;
+
+ public static JobState fromString(String name) {
+ return valueOf(name.trim().toUpperCase(Locale.ROOT));
+ }
+
+ public String value() {
+ return name().toLowerCase(Locale.ROOT);
+ }
+}
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/MlFilter.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/MlFilter.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java
index bcbc0c295c2d5..e0d1bd0849b3b 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/MlFilter.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/ModelPlotConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/ModelPlotConfig.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java
index 59b0252a7660e..b39db054b308b 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/ModelPlotConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Operator.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java
similarity index 97%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Operator.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java
index c3dc52e5a3cb9..37d6275203560 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Operator.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.ParseField;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/RuleAction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java
similarity index 95%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/RuleAction.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java
index 9e2364b4fd960..05b6ef6e19754 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/RuleAction.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import java.util.Locale;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/RuleCondition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/RuleCondition.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java
index ec19547fe13be..14389809bd2fa 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/RuleCondition.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/RuleScope.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java
similarity index 89%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/RuleScope.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java
index aa12d5ea2a2bd..8b6886d582524 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/RuleScope.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java
@@ -16,11 +16,11 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.config;
+package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ContextParser;
-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -50,7 +50,7 @@ public static ContextParser parser() {
Map<String, Object> value = (Map<String, Object>) entry.getValue();
builder.map(value);
try (XContentParser scopeParser = XContentFactory.xContent(builder.contentType()).createParser(
- NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, Strings.toString(builder))) {
+ NamedXContentRegistry.EMPTY, DEPRECATION_HANDLER, Strings.toString(builder))) {
scope.put(entry.getKey(), FilterRef.PARSER.parse(scopeParser, null));
}
}
@@ -59,6 +59,15 @@ public static ContextParser parser() {
};
}
+ private static final DeprecationHandler DEPRECATION_HANDLER = new DeprecationHandler() {
+
+ @Override
+ public void usedDeprecatedName(String usedName, String modernName) {}
+
+ @Override
+ public void usedDeprecatedField(String usedName, String replacedWith) {}
+ };
+
private final Map<String, FilterRef> scope;
public RuleScope() {
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCounts.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCounts.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java
index e07312d12e1f4..7afef0785fe38 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCounts.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java
@@ -16,15 +16,15 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.process;
+package org.elasticsearch.client.ml.job.process;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
-import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil;
import java.io.IOException;
import java.util.Date;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java
similarity index 97%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStats.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java
index 50f655b4dd7f1..c9a34fe5c98d9 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStats.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java
@@ -16,16 +16,16 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.process;
+package org.elasticsearch.client.ml.job.process;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.results.Result;
+import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
-import org.elasticsearch.protocol.xpack.ml.job.results.Result;
-import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil;
import java.io.IOException;
import java.util.Date;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java
similarity index 97%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java
index 2b9957f9bc756..603bff0d90653 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java
@@ -16,16 +16,16 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.process;
+package org.elasticsearch.client.ml.job.process;
import org.elasticsearch.Version;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
-import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil;
import java.io.IOException;
import java.util.Date;
@@ -221,10 +221,8 @@ public boolean equals(Object other) {
public static class Builder {
private String jobId;
- // Stored snapshot documents created prior to 6.3.0 will have no
- // value for min_version. We default it to 5.5.0 as there were
- // no model changes between 5.5.0 and 6.3.0.
- private Version minVersion = Version.V_5_5_0;
+ // Stored snapshot documents created prior to 6.3.0 will have no value for min_version.
+ private Version minVersion = Version.V_6_3_0;
private Date timestamp;
private String description;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java
similarity index 96%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java
index 1c047d6c30284..795028847a0be 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java
@@ -16,14 +16,14 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.process;
+package org.elasticsearch.client.ml.job.process;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.Date;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyCause.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java
similarity index 99%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyCause.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java
index 7ad57b24fcbdc..4fbe5ac1ff381 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyCause.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.results;
+package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java
similarity index 99%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java
index 4747f3a48bdc8..db4483fef4bfd 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java
@@ -16,8 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.results;
+package org.elasticsearch.client.ml.job.results;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@@ -25,7 +26,6 @@
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java
index cbaf83abbad40..2dfed4c383403 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java
@@ -16,8 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.results;
+package org.elasticsearch.client.ml.job.results;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@@ -25,7 +26,6 @@
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java
index 29d8447cd6a37..6fc2a9b8b2d54 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java
@@ -16,8 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.results;
+package org.elasticsearch.client.ml.job.results;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@@ -25,7 +26,6 @@
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java
index 59b59006b33a1..dd65899e67e12 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java
@@ -16,13 +16,13 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.results;
+package org.elasticsearch.client.ml.job.results;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.ArrayList;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influence.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influence.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java
index 53607479d66f4..bfcc545362d3a 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influence.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.results;
+package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java
index 51c88883608b0..28ceb243bf6b2 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java
@@ -16,8 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.results;
+package org.elasticsearch.client.ml.job.results;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@@ -25,7 +26,6 @@
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java
similarity index 98%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java
index 4f13b4b26646e..eaf050f8be9fb 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java
@@ -16,8 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.results;
+package org.elasticsearch.client.ml.job.results;
+import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@@ -25,7 +26,6 @@
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java
similarity index 95%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java
index cce5fa65ebb44..a7f8933a0a131 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.results;
+package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.common.ParseField;
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java
new file mode 100644
index 0000000000000..a6b41beca8366
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java
@@ -0,0 +1,174 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A class to hold statistics about forecasts.
+ */
+public class ForecastStats implements ToXContentObject {
+
+ public static final ParseField TOTAL = new ParseField("total");
+ public static final ParseField FORECASTED_JOBS = new ParseField("forecasted_jobs");
+ public static final ParseField MEMORY_BYTES = new ParseField("memory_bytes");
+ public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms");
+ public static final ParseField RECORDS = new ParseField("records");
+ public static final ParseField STATUS = new ParseField("status");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<ForecastStats, Void> PARSER =
+ new ConstructingObjectParser<>("forecast_stats",
+ true,
+ (a) -> {
+ int i = 0;
+ long total = (long)a[i++];
+ SimpleStats memoryStats = (SimpleStats)a[i++];
+ SimpleStats recordStats = (SimpleStats)a[i++];
+ SimpleStats runtimeStats = (SimpleStats)a[i++];
+ Map<String, Long> statusCounts = (Map<String, Long>)a[i];
+ return new ForecastStats(total, memoryStats, recordStats, runtimeStats, statusCounts);
+ });
+
+ static {
+ PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, MEMORY_BYTES);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, RECORDS);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, PROCESSING_TIME_MS);
+ PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
+ p -> {
+ Map<String, Long> counts = new HashMap<>();
+ p.map().forEach((key, value) -> counts.put(key, ((Number)value).longValue()));
+ return counts;
+ }, STATUS, ObjectParser.ValueType.OBJECT);
+ }
+
+ private final long total;
+ private final long forecastedJobs;
+ private SimpleStats memoryStats;
+ private SimpleStats recordStats;
+ private SimpleStats runtimeStats;
+ private Map<String, Long> statusCounts;
+
+ public ForecastStats(long total,
+ SimpleStats memoryStats,
+ SimpleStats recordStats,
+ SimpleStats runtimeStats,
+ Map<String, Long> statusCounts) {
+ this.total = total;
+ this.forecastedJobs = total > 0 ? 1 : 0;
+ if (total > 0) {
+ this.memoryStats = Objects.requireNonNull(memoryStats);
+ this.recordStats = Objects.requireNonNull(recordStats);
+ this.runtimeStats = Objects.requireNonNull(runtimeStats);
+ this.statusCounts = Collections.unmodifiableMap(statusCounts);
+ }
+ }
+
+ /**
+ * The number of forecasts currently available for this model.
+ */
+ public long getTotal() {
+ return total;
+ }
+
+ /**
+ * The number of jobs that have at least one forecast.
+ */
+ public long getForecastedJobs() {
+ return forecastedJobs;
+ }
+
+ /**
+ * Statistics about the memory usage: minimum, maximum, average and total.
+ */
+ public SimpleStats getMemoryStats() {
+ return memoryStats;
+ }
+
+ /**
+ * Statistics about the number of forecast records: minimum, maximum, average and total.
+ */
+ public SimpleStats getRecordStats() {
+ return recordStats;
+ }
+
+ /**
+ * Statistics about the forecast runtime in milliseconds: minimum, maximum, average and total
+ */
+ public SimpleStats getRuntimeStats() {
+ return runtimeStats;
+ }
+
+ /**
+ * Counts per forecast status, for example: {"finished" : 2}.
+ */
+ public Map<String, Long> getStatusCounts() {
+ return statusCounts;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(TOTAL.getPreferredName(), total);
+ builder.field(FORECASTED_JOBS.getPreferredName(), forecastedJobs);
+
+ if (total > 0) {
+ builder.field(MEMORY_BYTES.getPreferredName(), memoryStats);
+ builder.field(RECORDS.getPreferredName(), recordStats);
+ builder.field(PROCESSING_TIME_MS.getPreferredName(), runtimeStats);
+ builder.field(STATUS.getPreferredName(), statusCounts);
+ }
+ return builder.endObject();
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(total, forecastedJobs, memoryStats, recordStats, runtimeStats, statusCounts);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ ForecastStats other = (ForecastStats) obj;
+ return Objects.equals(total, other.total) &&
+ Objects.equals(forecastedJobs, other.forecastedJobs) &&
+ Objects.equals(memoryStats, other.memoryStats) &&
+ Objects.equals(recordStats, other.recordStats) &&
+ Objects.equals(runtimeStats, other.runtimeStats) &&
+ Objects.equals(statusCounts, other.statusCounts);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java
new file mode 100644
index 0000000000000..df5be4aa4c5cc
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.config.JobState;
+import org.elasticsearch.client.ml.job.process.DataCounts;
+import org.elasticsearch.client.ml.job.process.ModelSizeStats;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.client.ml.NodeAttributes;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Class containing the statistics for a Machine Learning job.
+ *
+ */
+public class JobStats implements ToXContentObject {
+
+    private static final ParseField DATA_COUNTS = new ParseField("data_counts");
+    private static final ParseField MODEL_SIZE_STATS = new ParseField("model_size_stats");
+    private static final ParseField FORECASTS_STATS = new ParseField("forecasts_stats");
+    private static final ParseField STATE = new ParseField("state");
+    private static final ParseField NODE = new ParseField("node");
+    private static final ParseField OPEN_TIME = new ParseField("open_time");
+    private static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
+
+    public static final ConstructingObjectParser<JobStats, Void> PARSER =
+        new ConstructingObjectParser<>("job_stats",
+            true, // lenient: ignore unknown fields for forward compatibility
+            (a) -> {
+                int i = 0;
+                String jobId = (String) a[i++];
+                DataCounts dataCounts = (DataCounts) a[i++];
+                JobState jobState = (JobState) a[i++];
+                ModelSizeStats.Builder modelSizeStatsBuilder = (ModelSizeStats.Builder) a[i++];
+                ModelSizeStats modelSizeStats = modelSizeStatsBuilder == null ? null : modelSizeStatsBuilder.build();
+                ForecastStats forecastStats = (ForecastStats) a[i++];
+                NodeAttributes node = (NodeAttributes) a[i++];
+                String assignmentExplanation = (String) a[i++];
+                TimeValue openTime = (TimeValue) a[i];
+                return new JobStats(jobId,
+                    dataCounts,
+                    jobState,
+                    modelSizeStats,
+                    forecastStats,
+                    node,
+                    assignmentExplanation,
+                    openTime);
+            });
+
+    static {
+        // declaration order must match the constructor-arg order consumed in the lambda above
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
+        PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataCounts.PARSER, DATA_COUNTS);
+        PARSER.declareField(ConstructingObjectParser.constructorArg(),
+            (p) -> JobState.fromString(p.text()),
+            STATE,
+            ObjectParser.ValueType.VALUE);
+        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSizeStats.PARSER, MODEL_SIZE_STATS);
+        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ForecastStats.PARSER, FORECASTS_STATS);
+        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE);
+        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
+        PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
+            (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), OPEN_TIME.getPreferredName()),
+            OPEN_TIME,
+            ObjectParser.ValueType.STRING_OR_NULL);
+    }
+
+
+    private final String jobId;
+    private final DataCounts dataCounts;
+    private final JobState state;
+    private final ModelSizeStats modelSizeStats;
+    private final ForecastStats forecastStats;
+    private final NodeAttributes node;
+    private final String assignmentExplanation;
+    private final TimeValue openTime;
+
+    JobStats(String jobId, DataCounts dataCounts, JobState state, @Nullable ModelSizeStats modelSizeStats,
+             @Nullable ForecastStats forecastStats, @Nullable NodeAttributes node,
+             @Nullable String assignmentExplanation, @Nullable TimeValue openTime) {
+        this.jobId = Objects.requireNonNull(jobId);
+        this.dataCounts = Objects.requireNonNull(dataCounts);
+        this.state = Objects.requireNonNull(state);
+        this.modelSizeStats = modelSizeStats;
+        this.forecastStats = forecastStats;
+        this.node = node;
+        this.assignmentExplanation = assignmentExplanation;
+        this.openTime = openTime;
+    }
+
+    /**
+     * The jobId referencing the job for these statistics
+     */
+    public String getJobId() {
+        return jobId;
+    }
+
+    /**
+     * An object that describes the number of records processed and any related error counts
+     * See {@link DataCounts}
+     */
+    public DataCounts getDataCounts() {
+        return dataCounts;
+    }
+
+    /**
+     * An object that provides information about the size and contents of the model.
+     * See {@link ModelSizeStats}
+     */
+    public ModelSizeStats getModelSizeStats() {
+        return modelSizeStats;
+    }
+
+    /**
+     * An object that provides statistical information about forecasts of this job.
+     * See {@link ForecastStats}
+     */
+    public ForecastStats getForecastStats() {
+        return forecastStats;
+    }
+
+    /**
+     * The status of the job
+     * See {@link JobState}
+     */
+    public JobState getState() {
+        return state;
+    }
+
+    /**
+     * For open jobs only, contains information about the node where the job runs
+     * See {@link NodeAttributes}
+     */
+    public NodeAttributes getNode() {
+        return node;
+    }
+
+    /**
+     * For open jobs only, contains messages relating to the selection of a node to run the job.
+     */
+    public String getAssignmentExplanation() {
+        return assignmentExplanation;
+    }
+
+    /**
+     * For open jobs only, the elapsed time for which the job has been open
+     */
+    public TimeValue getOpenTime() {
+        return openTime;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(Job.ID.getPreferredName(), jobId);
+        builder.field(DATA_COUNTS.getPreferredName(), dataCounts);
+        builder.field(STATE.getPreferredName(), state.toString());
+        if (modelSizeStats != null) { // optional fields are omitted when absent
+            builder.field(MODEL_SIZE_STATS.getPreferredName(), modelSizeStats);
+        }
+        if (forecastStats != null) {
+            builder.field(FORECASTS_STATS.getPreferredName(), forecastStats);
+        }
+        if (node != null) {
+            builder.field(NODE.getPreferredName(), node);
+        }
+        if (assignmentExplanation != null) {
+            builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation);
+        }
+        if (openTime != null) {
+            builder.field(OPEN_TIME.getPreferredName(), openTime.getStringRep());
+        }
+        return builder.endObject();
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobId, dataCounts, modelSizeStats, forecastStats, state, node, assignmentExplanation, openTime);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        JobStats other = (JobStats) obj;
+        return Objects.equals(jobId, other.jobId) &&
+            Objects.equals(this.dataCounts, other.dataCounts) &&
+            Objects.equals(this.modelSizeStats, other.modelSizeStats) &&
+            Objects.equals(this.forecastStats, other.forecastStats) &&
+            Objects.equals(this.state, other.state) &&
+            Objects.equals(this.node, other.node) &&
+            Objects.equals(this.assignmentExplanation, other.assignmentExplanation) &&
+            Objects.equals(this.openTime, other.openTime);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java
new file mode 100644
index 0000000000000..f4c8aa0fa3b29
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Helper class for min, max, avg and total statistics for a quantity
+ */
+public class SimpleStats implements ToXContentObject {
+
+    public static final ParseField MIN = new ParseField("min");
+    public static final ParseField MAX = new ParseField("max");
+    public static final ParseField AVG = new ParseField("avg");
+    public static final ParseField TOTAL = new ParseField("total");
+
+    public static final ConstructingObjectParser<SimpleStats, Void> PARSER = new ConstructingObjectParser<>("simple_stats", true,
+        (a) -> {
+            int i = 0;
+            double total = (double)a[i++];
+            double min = (double)a[i++];
+            double max = (double)a[i++];
+            double avg = (double)a[i++];
+            return new SimpleStats(total, min, max, avg);
+        });
+
+    static {
+        // declaration order must match the order consumed in the lambda above
+        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), TOTAL);
+        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MIN);
+        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX);
+        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AVG);
+    }
+
+    private final double total;
+    private final double min;
+    private final double max;
+    private final double avg;
+
+    SimpleStats(double total, double min, double max, double avg) {
+        this.total = total;
+        this.min = min;
+        this.max = max;
+        this.avg = avg;
+    }
+
+    public double getMin() {
+        return min;
+    }
+
+    public double getMax() {
+        return max;
+    }
+
+    public double getAvg() {
+        return avg;
+    }
+
+    public double getTotal() {
+        return total;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(total, min, max, avg);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        SimpleStats other = (SimpleStats) obj;
+        // boxed comparison via Objects.equals also treats NaN == NaN as equal
+        return Objects.equals(total, other.total) &&
+            Objects.equals(min, other.min) &&
+            Objects.equals(avg, other.avg) &&
+            Objects.equals(max, other.max);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(MIN.getPreferredName(), min);
+        builder.field(MAX.getPreferredName(), max);
+        builder.field(AVG.getPreferredName(), avg);
+        builder.field(TOTAL.getPreferredName(), total);
+        builder.endObject();
+        return builder;
+    }
+}
+
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/util/PageParams.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/util/PageParams.java
new file mode 100644
index 0000000000000..52d54188f7007
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/util/PageParams.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.util;
+
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Paging parameters for GET requests
+ */
+public class PageParams implements ToXContentObject {
+
+    public static final ParseField PAGE = new ParseField("page");
+    public static final ParseField FROM = new ParseField("from");
+    public static final ParseField SIZE = new ParseField("size");
+
+    public static final ConstructingObjectParser<PageParams, Void> PARSER = new ConstructingObjectParser<>(PAGE.getPreferredName(),
+        a -> new PageParams((Integer) a[0], (Integer) a[1]));
+
+    static {
+        PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), FROM);
+        PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), SIZE);
+    }
+
+    private final Integer from;
+    private final Integer size;
+
+    /**
+     * Constructs paging parameters
+     * @param from skips the specified number of items. When {@code null} the default value will be used.
+     * @param size specifies the maximum number of items to obtain. When {@code null} the default value will be used.
+     */
+    public PageParams(@Nullable Integer from, @Nullable Integer size) {
+        this.from = from;
+        this.size = size;
+    }
+
+    // returns Integer (not int) because the field is nullable; unboxing would NPE when unset
+    public Integer getFrom() {
+        return from;
+    }
+
+    public Integer getSize() {
+        return size;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        if (from != null) { // omit unset parameters so server defaults apply
+            builder.field(FROM.getPreferredName(), from);
+        }
+        if (size != null) {
+            builder.field(SIZE.getPreferredName(), size);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(from, size);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        PageParams other = (PageParams) obj;
+        return Objects.equals(from, other.from) &&
+            Objects.equals(size, other.size);
+    }
+
+}
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/TimeUtil.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/util/TimeUtil.java
similarity index 97%
rename from x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/TimeUtil.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/util/TimeUtil.java
index 549b196949145..4c21ffb2175b2 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/TimeUtil.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/util/TimeUtil.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.protocol.xpack.ml.job.util;
+package org.elasticsearch.client.ml.job.util;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.XContentParser;
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/RefreshPolicy.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/RefreshPolicy.java
new file mode 100644
index 0000000000000..8b72f704edff4
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/RefreshPolicy.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.security;
+
+/**
+ * Enumeration of values that control the refresh policy for a request that
+ * supports specifying a refresh policy.
+ */
+public enum RefreshPolicy {
+
+    /**
+     * Don't refresh after this request. The default.
+     */
+    NONE("false"),
+    /**
+     * Force a refresh as part of this request. This refresh policy does not scale for high indexing or search throughput but is useful
+     * to present a consistent view for indices with very low traffic. And it is wonderful for tests!
+     */
+    IMMEDIATE("true"),
+    /**
+     * Leave this request open until a refresh has made the contents of this request visible to search. This refresh policy is
+     * compatible with high indexing and search throughput but it causes the request to wait to reply until a refresh occurs.
+     */
+    WAIT_UNTIL("wait_for");
+
+    private final String value;
+
+    RefreshPolicy(String value) {
+        this.value = value;
+    }
+
+    /**
+     * The wire value sent as the {@code refresh} request parameter.
+     */
+    public String getValue() {
+        return value;
+    }
+
+    /**
+     * Get the default refresh policy, which is NONE
+     */
+    public static RefreshPolicy getDefault() {
+        return RefreshPolicy.NONE;
+    }
+}
diff --git a/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt b/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt
index 33e136a66f44f..cc179e12e3163 100644
--- a/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt
+++ b/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt
@@ -20,5 +20,14 @@ org.apache.http.entity.ContentType#create(java.lang.String,java.lang.String)
org.apache.http.entity.ContentType#create(java.lang.String,java.nio.charset.Charset)
org.apache.http.entity.ContentType#create(java.lang.String,org.apache.http.NameValuePair[])
+@defaultMessage ES's logging infrastructure uses log4j2 which we don't want to force on high level rest client users
+org.elasticsearch.common.logging.DeprecationLogger
+org.elasticsearch.common.logging.ESLoggerFactory
+org.elasticsearch.common.logging.LogConfigurator
+org.elasticsearch.common.logging.LoggerMessageFormat
+org.elasticsearch.common.logging.Loggers
+org.elasticsearch.common.logging.NodeNamePatternConverter
+org.elasticsearch.common.logging.PrefixLogger
+
@defaultMessage We can't rely on log4j2 being on the classpath so don't log deprecations!
org.elasticsearch.common.xcontent.LoggingDeprecationHandler
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
index 58b4b268788b5..a914008376a5d 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.client;
+import org.apache.http.util.EntityUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
@@ -34,6 +35,7 @@
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.client.Request;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.rest.RestStatus;
@@ -174,6 +176,8 @@ public void testClusterHealthYellowClusterLevel() throws IOException {
request.timeout("5s");
ClusterHealthResponse response = execute(request, highLevelClient().cluster()::health, highLevelClient().cluster()::healthAsync);
+ logger.info("Shard stats\n{}", EntityUtils.toString(
+ client().performRequest(new Request("GET", "/_cat/shards")).getEntity()));
assertYellowShards(response);
assertThat(response.getIndices().size(), equalTo(0));
}
@@ -186,6 +190,8 @@ public void testClusterHealthYellowIndicesLevel() throws IOException {
request.level(ClusterHealthRequest.Level.INDICES);
ClusterHealthResponse response = execute(request, highLevelClient().cluster()::health, highLevelClient().cluster()::healthAsync);
+ logger.info("Shard stats\n{}", EntityUtils.toString(
+ client().performRequest(new Request("GET", "/_cat/shards")).getEntity()));
assertYellowShards(response);
assertThat(response.getIndices().size(), equalTo(2));
for (Map.Entry entry : response.getIndices().entrySet()) {
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
index 89f357477fa06..e02d9f451ebe0 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
@@ -41,12 +41,17 @@
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.get.GetResult;
+import org.elasticsearch.index.query.IdsQueryBuilder;
+import org.elasticsearch.index.reindex.BulkByScrollResponse;
+import org.elasticsearch.index.reindex.ReindexRequest;
+import org.elasticsearch.index.reindex.UpdateByQueryRequest;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
@@ -624,6 +629,135 @@ public void testBulk() throws IOException {
validateBulkResponses(nbItems, errors, bulkResponse, bulkRequest);
}
+    public void testReindex() throws IOException {
+        final String sourceIndex = "source1";
+        final String destinationIndex = "dest";
+        {
+            // Prepare: single-shard source and dest indices, two docs indexed into source
+            Settings settings = Settings.builder()
+                .put("number_of_shards", 1)
+                .put("number_of_replicas", 0)
+                .build();
+            createIndex(sourceIndex, settings);
+            createIndex(destinationIndex, settings);
+            assertEquals(
+                RestStatus.OK,
+                highLevelClient().bulk(
+                    new BulkRequest()
+                        .add(new IndexRequest(sourceIndex, "type", "1")
+                            .source(Collections.singletonMap("foo", "bar"), XContentType.JSON))
+                        .add(new IndexRequest(sourceIndex, "type", "2")
+                            .source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON))
+                        .setRefreshPolicy(RefreshPolicy.IMMEDIATE), // make the docs searchable immediately
+                    RequestOptions.DEFAULT
+                ).status()
+            );
+        }
+        {
+            // test1: reindex only doc "1" into dest, creating one doc there
+            ReindexRequest reindexRequest = new ReindexRequest();
+            reindexRequest.setSourceIndices(sourceIndex);
+            reindexRequest.setDestIndex(destinationIndex);
+            reindexRequest.setSourceQuery(new IdsQueryBuilder().addIds("1").types("type"));
+            reindexRequest.setRefresh(true);
+            BulkByScrollResponse bulkResponse = execute(reindexRequest, highLevelClient()::reindex, highLevelClient()::reindexAsync);
+            assertEquals(1, bulkResponse.getCreated());
+            assertEquals(1, bulkResponse.getTotal());
+            assertEquals(0, bulkResponse.getDeleted());
+            assertEquals(0, bulkResponse.getNoops());
+            assertEquals(0, bulkResponse.getVersionConflicts());
+            assertEquals(1, bulkResponse.getBatches());
+            assertTrue(bulkResponse.getTook().getMillis() > 0);
+            assertEquals(1, bulkResponse.getBatches()); // NOTE(review): duplicate of the assertion above
+            assertEquals(0, bulkResponse.getBulkFailures().size());
+            assertEquals(0, bulkResponse.getSearchFailures().size());
+        }
+        {
+            // test2: reindex everything; doc "2" is created in dest, doc "1" already exists there and is updated
+            ReindexRequest reindexRequest = new ReindexRequest();
+            reindexRequest.setSourceIndices(sourceIndex);
+            reindexRequest.setDestIndex(destinationIndex);
+            BulkByScrollResponse bulkResponse = execute(reindexRequest, highLevelClient()::reindex, highLevelClient()::reindexAsync);
+            assertEquals(1, bulkResponse.getCreated());
+            assertEquals(2, bulkResponse.getTotal());
+            assertEquals(1, bulkResponse.getUpdated());
+            assertEquals(0, bulkResponse.getDeleted());
+            assertEquals(0, bulkResponse.getNoops());
+            assertEquals(0, bulkResponse.getVersionConflicts());
+            assertEquals(1, bulkResponse.getBatches());
+            assertTrue(bulkResponse.getTook().getMillis() > 0);
+            assertEquals(1, bulkResponse.getBatches()); // NOTE(review): duplicate of the assertion above
+            assertEquals(0, bulkResponse.getBulkFailures().size());
+            assertEquals(0, bulkResponse.getSearchFailures().size());
+        }
+    }
+
+    public void testUpdateByQuery() throws IOException {
+        final String sourceIndex = "source1";
+        {
+            // Prepare: single-shard index with two docs (foo=1 and foo=2)
+            Settings settings = Settings.builder()
+                .put("number_of_shards", 1)
+                .put("number_of_replicas", 0)
+                .build();
+            createIndex(sourceIndex, settings);
+            assertEquals(
+                RestStatus.OK,
+                highLevelClient().bulk(
+                    new BulkRequest()
+                        .add(new IndexRequest(sourceIndex, "type", "1")
+                            .source(Collections.singletonMap("foo", 1), XContentType.JSON))
+                        .add(new IndexRequest(sourceIndex, "type", "2")
+                            .source(Collections.singletonMap("foo", 2), XContentType.JSON))
+                        .setRefreshPolicy(RefreshPolicy.IMMEDIATE), // make the docs searchable immediately
+                    RequestOptions.DEFAULT
+                ).status()
+            );
+        }
+        {
+            // test1: update a single doc (id "1") in place, selected by ids query
+            UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest();
+            updateByQueryRequest.indices(sourceIndex);
+            updateByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1").types("type"));
+            updateByQueryRequest.setRefresh(true);
+            BulkByScrollResponse bulkResponse =
+                execute(updateByQueryRequest, highLevelClient()::updateByQuery, highLevelClient()::updateByQueryAsync);
+            assertEquals(1, bulkResponse.getTotal());
+            assertEquals(1, bulkResponse.getUpdated());
+            assertEquals(0, bulkResponse.getNoops());
+            assertEquals(0, bulkResponse.getVersionConflicts());
+            assertEquals(1, bulkResponse.getBatches());
+            assertTrue(bulkResponse.getTook().getMillis() > 0);
+            assertEquals(1, bulkResponse.getBatches()); // NOTE(review): duplicate of the assertion above
+            assertEquals(0, bulkResponse.getBulkFailures().size());
+            assertEquals(0, bulkResponse.getSearchFailures().size());
+        }
+        {
+            // test2: update all docs via script; only doc "2" matches the condition and gets foo incremented
+            UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest();
+            updateByQueryRequest.indices(sourceIndex);
+            updateByQueryRequest.setScript(new Script("if (ctx._source.foo == 2) ctx._source.foo++;"));
+            updateByQueryRequest.setRefresh(true);
+            BulkByScrollResponse bulkResponse =
+                execute(updateByQueryRequest, highLevelClient()::updateByQuery, highLevelClient()::updateByQueryAsync);
+            assertEquals(2, bulkResponse.getTotal());
+            assertEquals(2, bulkResponse.getUpdated());
+            assertEquals(0, bulkResponse.getDeleted());
+            assertEquals(0, bulkResponse.getNoops());
+            assertEquals(0, bulkResponse.getVersionConflicts());
+            assertEquals(1, bulkResponse.getBatches());
+            assertTrue(bulkResponse.getTook().getMillis() > 0);
+            assertEquals(1, bulkResponse.getBatches()); // NOTE(review): duplicate of the assertion above
+            assertEquals(0, bulkResponse.getBulkFailures().size());
+            assertEquals(0, bulkResponse.getSearchFailures().size());
+            assertEquals(
+                3, // foo was 2 and the script incremented it once
+                (int) (highLevelClient().get(new GetRequest(sourceIndex, "type", "2"), RequestOptions.DEFAULT)
+                    .getSourceAsMap().get("foo"))
+            );
+        }
+    }
+
public void testBulkProcessorIntegration() throws IOException {
int nbItems = randomIntBetween(10, 100);
boolean[] errors = new boolean[nbItems];
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java
new file mode 100644
index 0000000000000..4376b47d737b4
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.action.ShardOperationFailedException;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
+import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
+import org.elasticsearch.protocol.xpack.graph.Hop;
+import org.elasticsearch.protocol.xpack.graph.Vertex;
+import org.elasticsearch.protocol.xpack.graph.VertexRequest;
+import org.hamcrest.Matchers;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+public class GraphIT extends ESRestHighLevelClientTestCase {
+
+ @Before
+ public void indexDocuments() throws IOException {
+ // Create chain of doc IDs across indices 1->2->3
+ Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/type/1");
+ doc1.setJsonEntity("{ \"num\":[1], \"const\":\"start\"}");
+ client().performRequest(doc1);
+
+ Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/type/1");
+ doc2.setJsonEntity("{\"num\":[1,2], \"const\":\"foo\"}");
+ client().performRequest(doc2);
+
+ Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/type/2");
+ doc3.setJsonEntity("{\"num\":[2,3], \"const\":\"foo\"}");
+ client().performRequest(doc3);
+
+ Request doc4 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/type/2");
+ doc4.setJsonEntity("{\"num\":\"string\", \"const\":\"foo\"}");
+ client().performRequest(doc4);
+
+ Request doc5 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/type/2");
+ doc5.setJsonEntity("{\"num\":[2,4], \"const\":\"foo\"}");
+ client().performRequest(doc5);
+
+
+ client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
+ }
+
+ public void testCleanExplore() throws Exception {
+ GraphExploreRequest graphExploreRequest = new GraphExploreRequest();
+ graphExploreRequest.indices("index1", "index2");
+ graphExploreRequest.useSignificance(false);
+ int numHops = 3;
+ for (int i = 0; i < numHops; i++) {
+ QueryBuilder guidingQuery = null;
+ if (i == 0) {
+ guidingQuery = new TermQueryBuilder("const.keyword", "start");
+ } else if (randomBoolean()){
+ guidingQuery = new TermQueryBuilder("const.keyword", "foo");
+ }
+ Hop hop = graphExploreRequest.createNextHop(guidingQuery);
+ VertexRequest vr = hop.addVertexRequest("num");
+ vr.minDocCount(1);
+ }
+ Map expectedTermsAndDepths = new HashMap<>();
+ expectedTermsAndDepths.put("1", 0);
+ expectedTermsAndDepths.put("2", 1);
+ expectedTermsAndDepths.put("3", 2);
+
+ GraphExploreResponse exploreResponse = highLevelClient().graph().explore(graphExploreRequest, RequestOptions.DEFAULT);
+ Map actualTermsAndDepths = new HashMap<>();
+ Collection v = exploreResponse.getVertices();
+ for (Vertex vertex : v) {
+ actualTermsAndDepths.put(vertex.getTerm(), vertex.getHopDepth());
+ }
+ assertEquals(expectedTermsAndDepths, actualTermsAndDepths);
+ assertThat(exploreResponse.isTimedOut(), Matchers.is(false));
+ ShardOperationFailedException[] failures = exploreResponse.getShardFailures();
+ assertThat(failures.length, Matchers.equalTo(0));
+
+ }
+
+ public void testBadExplore() throws Exception {
+ //Explore indices where lack of fielddata=true on one index leads to partial failures
+ GraphExploreRequest graphExploreRequest = new GraphExploreRequest();
+ graphExploreRequest.indices("index1", "index2", "index_no_field_data");
+ graphExploreRequest.useSignificance(false);
+ int numHops = 3;
+ for (int i = 0; i < numHops; i++) {
+ QueryBuilder guidingQuery = null;
+ if (i == 0) {
+ guidingQuery = new TermQueryBuilder("const.keyword", "start");
+ } else if (randomBoolean()){
+ guidingQuery = new TermQueryBuilder("const.keyword", "foo");
+ }
+ Hop hop = graphExploreRequest.createNextHop(guidingQuery);
+ VertexRequest vr = hop.addVertexRequest("num");
+ vr.minDocCount(1);
+ }
+ Map expectedTermsAndDepths = new HashMap<>();
+ expectedTermsAndDepths.put("1", 0);
+ expectedTermsAndDepths.put("2", 1);
+ expectedTermsAndDepths.put("3", 2);
+
+ GraphExploreResponse exploreResponse = highLevelClient().graph().explore(graphExploreRequest, RequestOptions.DEFAULT);
+ Map actualTermsAndDepths = new HashMap<>();
+ Collection v = exploreResponse.getVertices();
+ for (Vertex vertex : v) {
+ actualTermsAndDepths.put(vertex.getTerm(), vertex.getHopDepth());
+ }
+ assertEquals(expectedTermsAndDepths, actualTermsAndDepths);
+ assertThat(exploreResponse.isTimedOut(), Matchers.is(false));
+ ShardOperationFailedException[] failures = exploreResponse.getShardFailures();
+ assertThat(failures.length, Matchers.equalTo(1));
+ assertTrue(failures[0].reason().contains("Fielddata is disabled"));
+
+ }
+
+
+}
\ No newline at end of file
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
new file mode 100644
index 0000000000000..d84099d9a3c40
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
@@ -0,0 +1,195 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.client.ml.CloseJobRequest;
+import org.elasticsearch.client.ml.DeleteJobRequest;
+import org.elasticsearch.client.ml.GetBucketsRequest;
+import org.elasticsearch.client.ml.GetJobRequest;
+import org.elasticsearch.client.ml.OpenJobRequest;
+import org.elasticsearch.client.ml.PutJobRequest;
+import org.elasticsearch.client.ml.job.config.AnalysisConfig;
+import org.elasticsearch.client.ml.job.config.Detector;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class MLRequestConvertersTests extends ESTestCase {
+
+ public void testPutJob() throws IOException {
+ Job job = createValidJob("foo");
+ PutJobRequest putJobRequest = new PutJobRequest(job);
+
+ Request request = MLRequestConverters.putJob(putJobRequest);
+
+ assertEquals(HttpPut.METHOD_NAME, request.getMethod());
+ assertThat(request.getEndpoint(), equalTo("/_xpack/ml/anomaly_detectors/foo"));
+ try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
+ Job parsedJob = Job.PARSER.apply(parser, null).build();
+ assertThat(parsedJob, equalTo(job));
+ }
+ }
+
+ public void testGetJob() {
+ GetJobRequest getJobRequest = new GetJobRequest();
+
+ Request request = MLRequestConverters.getJob(getJobRequest);
+
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors", request.getEndpoint());
+ assertFalse(request.getParameters().containsKey("allow_no_jobs"));
+
+ getJobRequest = new GetJobRequest("job1", "jobs*");
+ getJobRequest.setAllowNoJobs(true);
+ request = MLRequestConverters.getJob(getJobRequest);
+
+ assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*", request.getEndpoint());
+ assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
+ }
+
+ public void testOpenJob() throws Exception {
+ String jobId = "some-job-id";
+ OpenJobRequest openJobRequest = new OpenJobRequest(jobId);
+ openJobRequest.setTimeout(TimeValue.timeValueMinutes(10));
+
+ Request request = MLRequestConverters.openJob(openJobRequest);
+ assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_open", request.getEndpoint());
+ assertEquals(requestEntityToString(request), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}");
+ }
+
+ public void testCloseJob() throws Exception {
+ String jobId = "somejobid";
+ CloseJobRequest closeJobRequest = new CloseJobRequest(jobId);
+
+ Request request = MLRequestConverters.closeJob(closeJobRequest);
+ assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_close", request.getEndpoint());
+ assertEquals("{\"job_id\":\"somejobid\"}", requestEntityToString(request));
+
+ closeJobRequest = new CloseJobRequest(jobId, "otherjobs*");
+ closeJobRequest.setForce(true);
+ closeJobRequest.setAllowNoJobs(false);
+ closeJobRequest.setTimeout(TimeValue.timeValueMinutes(10));
+ request = MLRequestConverters.closeJob(closeJobRequest);
+
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + ",otherjobs*/_close", request.getEndpoint());
+ assertEquals("{\"job_id\":\"somejobid,otherjobs*\",\"timeout\":\"10m\",\"force\":true,\"allow_no_jobs\":false}",
+ requestEntityToString(request));
+ }
+
+ public void testDeleteJob() {
+ String jobId = randomAlphaOfLength(10);
+ DeleteJobRequest deleteJobRequest = new DeleteJobRequest(jobId);
+
+ Request request = MLRequestConverters.deleteJob(deleteJobRequest);
+ assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId, request.getEndpoint());
+ assertEquals(Boolean.toString(false), request.getParameters().get("force"));
+
+ deleteJobRequest.setForce(true);
+ request = MLRequestConverters.deleteJob(deleteJobRequest);
+ assertEquals(Boolean.toString(true), request.getParameters().get("force"));
+ }
+
+ public void testGetBuckets() throws IOException {
+ String jobId = randomAlphaOfLength(10);
+ GetBucketsRequest getBucketsRequest = new GetBucketsRequest(jobId);
+ getBucketsRequest.setPageParams(new PageParams(100, 300));
+ getBucketsRequest.setAnomalyScore(75.0);
+ getBucketsRequest.setSort("anomaly_score");
+ getBucketsRequest.setDescending(true);
+
+ Request request = MLRequestConverters.getBuckets(getBucketsRequest);
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/buckets", request.getEndpoint());
+ try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
+ GetBucketsRequest parsedRequest = GetBucketsRequest.PARSER.apply(parser, null);
+ assertThat(parsedRequest, equalTo(getBucketsRequest));
+ }
+ }
+
+ public void testFlushJob() throws Exception {
+ String jobId = randomAlphaOfLength(10);
+ FlushJobRequest flushJobRequest = new FlushJobRequest(jobId);
+
+ Request request = MLRequestConverters.flushJob(flushJobRequest);
+ assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_flush", request.getEndpoint());
+ assertEquals("{\"job_id\":\"" + jobId + "\"}", requestEntityToString(request));
+
+ flushJobRequest.setSkipTime("1000");
+ flushJobRequest.setStart("105");
+ flushJobRequest.setEnd("200");
+ flushJobRequest.setAdvanceTime("100");
+ flushJobRequest.setCalcInterim(true);
+ request = MLRequestConverters.flushJob(flushJobRequest);
+ assertEquals(
+ "{\"job_id\":\"" + jobId + "\",\"calc_interim\":true,\"start\":\"105\"," +
+ "\"end\":\"200\",\"advance_time\":\"100\",\"skip_time\":\"1000\"}",
+ requestEntityToString(request));
+ }
+
+ public void testGetJobStats() {
+ GetJobStatsRequest getJobStatsRequestRequest = new GetJobStatsRequest();
+
+ Request request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);
+
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/_stats", request.getEndpoint());
+ assertFalse(request.getParameters().containsKey("allow_no_jobs"));
+
+ getJobStatsRequestRequest = new GetJobStatsRequest("job1", "jobs*");
+ getJobStatsRequestRequest.setAllowNoJobs(true);
+ request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);
+
+ assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*/_stats", request.getEndpoint());
+ assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
+ }
+
+ private static Job createValidJob(String jobId) {
+ AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList(
+ Detector.builder().setFunction("count").build()));
+ Job.Builder jobBuilder = Job.builder(jobId);
+ jobBuilder.setAnalysisConfig(analysisConfig);
+ return jobBuilder.build();
+ }
+
+ private static String requestEntityToString(Request request) throws Exception {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ request.getEntity().writeTo(bos);
+ return bos.toString("UTF-8");
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java
new file mode 100644
index 0000000000000..6c8ca81cea224
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java
@@ -0,0 +1,299 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.client.ml.GetBucketsRequest;
+import org.elasticsearch.client.ml.GetBucketsResponse;
+import org.elasticsearch.client.ml.GetRecordsRequest;
+import org.elasticsearch.client.ml.GetRecordsResponse;
+import org.elasticsearch.client.ml.PutJobRequest;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.results.AnomalyRecord;
+import org.elasticsearch.client.ml.job.results.Bucket;
+import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.junit.After;
+import org.junit.Before;
+
+import java.io.IOException;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.lessThan;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+
+public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase {
+
+ private static final String RESULTS_INDEX = ".ml-anomalies-shared";
+ private static final String DOC = "doc";
+
+ private static final String JOB_ID = "get-results-it-job";
+
+ // 2018-08-01T00:00:00Z
+ private static final long START_TIME_EPOCH_MS = 1533081600000L;
+
+ private Stats bucketStats = new Stats();
+ private Stats recordStats = new Stats();
+
+ @Before
+ public void createJobAndIndexResults() throws IOException {
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ Job job = MachineLearningIT.buildJob(JOB_ID);
+ machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+
+ long time = START_TIME_EPOCH_MS;
+ long endTime = time + 3600000L * 24 * 10; // 10 days of hourly buckets
+ while (time < endTime) {
+ addBucketIndexRequest(time, false, bulkRequest);
+ addRecordIndexRequests(time, false, bulkRequest);
+ time += 3600000L;
+ }
+
+ // Also index an interim bucket
+ addBucketIndexRequest(time, true, bulkRequest);
+ addRecordIndexRequest(time, true, bulkRequest);
+
+ highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
+ }
+
+ private void addBucketIndexRequest(long timestamp, boolean isInterim, BulkRequest bulkRequest) {
+ IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC);
+ double bucketScore = randomDoubleBetween(0.0, 100.0, true);
+ bucketStats.report(bucketScore);
+ indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"bucket\", \"timestamp\": " + timestamp + "," +
+ "\"bucket_span\": 3600,\"is_interim\": " + isInterim + ", \"anomaly_score\": " + bucketScore +
+ ", \"bucket_influencers\":[{\"job_id\": \"" + JOB_ID + "\", \"result_type\":\"bucket_influencer\", " +
+ "\"influencer_field_name\": \"bucket_time\", \"timestamp\": " + timestamp + ", \"bucket_span\": 3600, " +
+ "\"is_interim\": " + isInterim + "}]}", XContentType.JSON);
+ bulkRequest.add(indexRequest);
+ }
+
+ private void addRecordIndexRequests(long timestamp, boolean isInterim, BulkRequest bulkRequest) {
+ if (randomBoolean()) {
+ return;
+ }
+ int recordCount = randomIntBetween(1, 3);
+ for (int i = 0; i < recordCount; ++i) {
+ addRecordIndexRequest(timestamp, isInterim, bulkRequest);
+ }
+ }
+
+ private void addRecordIndexRequest(long timestamp, boolean isInterim, BulkRequest bulkRequest) {
+ IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC);
+ double recordScore = randomDoubleBetween(0.0, 100.0, true);
+ recordStats.report(recordScore);
+ double p = randomDoubleBetween(0.0, 0.05, false);
+ indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"record\", \"timestamp\": " + timestamp + "," +
+ "\"bucket_span\": 3600,\"is_interim\": " + isInterim + ", \"record_score\": " + recordScore + ", \"probability\": "
+ + p + "}", XContentType.JSON);
+ bulkRequest.add(indexRequest);
+ }
+
+ @After
+ public void deleteJob() throws IOException {
+ new MlRestTestStateCleaner(logger, client()).clearMlMetadata();
+ }
+
+ public void testGetBuckets() throws IOException {
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.count(), equalTo(241L));
+ assertThat(response.buckets().size(), equalTo(100));
+ assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setTimestamp("1533081600000");
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.count(), equalTo(1L));
+ assertThat(response.buckets().size(), equalTo(1));
+ assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setAnomalyScore(75.0);
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.count(), equalTo(bucketStats.criticalCount));
+ assertThat(response.buckets().size(), equalTo((int) Math.min(100, bucketStats.criticalCount)));
+ assertThat(response.buckets().stream().anyMatch(b -> b.getAnomalyScore() < 75.0), is(false));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setExcludeInterim(true);
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.count(), equalTo(240L));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setStart("1533081600000");
+ request.setEnd("1533092400000");
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.count(), equalTo(3L));
+ assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS));
+ assertThat(response.buckets().get(1).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 3600000L));
+ assertThat(response.buckets().get(2).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 2 * + 3600000L));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setPageParams(new PageParams(3, 3));
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.buckets().size(), equalTo(3));
+ assertThat(response.buckets().get(0).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 3 * 3600000L));
+ assertThat(response.buckets().get(1).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 4 * 3600000L));
+ assertThat(response.buckets().get(2).getTimestamp().getTime(), equalTo(START_TIME_EPOCH_MS + 5 * 3600000L));
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ request.setSort("anomaly_score");
+ request.setDescending(true);
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ double previousScore = 100.0;
+ for (Bucket bucket : response.buckets()) {
+ assertThat(bucket.getAnomalyScore(), lessThanOrEqualTo(previousScore));
+ previousScore = bucket.getAnomalyScore();
+ }
+ }
+ {
+ GetBucketsRequest request = new GetBucketsRequest(JOB_ID);
+ // Make sure we get all buckets
+ request.setPageParams(new PageParams(0, 10000));
+ request.setExpand(true);
+
+ GetBucketsResponse response = execute(request, machineLearningClient::getBuckets, machineLearningClient::getBucketsAsync);
+
+ assertThat(response.buckets().stream().anyMatch(b -> b.getRecords().size() > 0), is(true));
+ }
+ }
+
+ public void testGetRecords() throws IOException {
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+
+ {
+ GetRecordsRequest request = new GetRecordsRequest(JOB_ID);
+
+ GetRecordsResponse response = execute(request, machineLearningClient::getRecords, machineLearningClient::getRecordsAsync);
+
+ assertThat(response.count(), greaterThan(0L));
+ assertThat(response.count(), equalTo(recordStats.totalCount()));
+ }
+ {
+ GetRecordsRequest request = new GetRecordsRequest(JOB_ID);
+ request.setRecordScore(50.0);
+
+ GetRecordsResponse response = execute(request, machineLearningClient::getRecords, machineLearningClient::getRecordsAsync);
+
+ long majorAndCriticalCount = recordStats.majorCount + recordStats.criticalCount;
+ assertThat(response.count(), equalTo(majorAndCriticalCount));
+ assertThat(response.records().size(), equalTo((int) Math.min(100, majorAndCriticalCount)));
+ assertThat(response.records().stream().anyMatch(r -> r.getRecordScore() < 50.0), is(false));
+ }
+ {
+ GetRecordsRequest request = new GetRecordsRequest(JOB_ID);
+ request.setExcludeInterim(true);
+
+ GetRecordsResponse response = execute(request, machineLearningClient::getRecords, machineLearningClient::getRecordsAsync);
+
+ assertThat(response.count(), equalTo(recordStats.totalCount() - 1));
+ }
+ {
+ long end = START_TIME_EPOCH_MS + 10 * 3600000;
+ GetRecordsRequest request = new GetRecordsRequest(JOB_ID);
+ request.setStart(String.valueOf(START_TIME_EPOCH_MS));
+ request.setEnd(String.valueOf(end));
+
+ GetRecordsResponse response = execute(request, machineLearningClient::getRecords, machineLearningClient::getRecordsAsync);
+
+ for (AnomalyRecord record : response.records()) {
+ assertThat(record.getTimestamp().getTime(), greaterThanOrEqualTo(START_TIME_EPOCH_MS));
+ assertThat(record.getTimestamp().getTime(), lessThan(end));
+ }
+ }
+ {
+ GetRecordsRequest request = new GetRecordsRequest(JOB_ID);
+ request.setPageParams(new PageParams(3, 3));
+
+ GetRecordsResponse response = execute(request, machineLearningClient::getRecords, machineLearningClient::getRecordsAsync);
+
+ assertThat(response.records().size(), equalTo(3));
+ }
+ {
+ GetRecordsRequest request = new GetRecordsRequest(JOB_ID);
+ request.setSort("probability");
+ request.setDescending(true);
+
+ GetRecordsResponse response = execute(request, machineLearningClient::getRecords, machineLearningClient::getRecordsAsync);
+
+ double previousProb = 1.0;
+ for (AnomalyRecord record : response.records()) {
+ assertThat(record.getProbability(), lessThanOrEqualTo(previousProb));
+ previousProb = record.getProbability();
+ }
+ }
+ }
+
+ private static class Stats {
+ // score < 50.0
+ private long minorCount;
+
+ // 50.0 <= score < 75.0
+ private long majorCount;
+
+ // score >= 75.0
+ private long criticalCount;
+
+ private void report(double score) {
+ if (score < 50.0) {
+ minorCount++;
+ } else if (score < 75.0) {
+ majorCount++;
+ } else {
+ criticalCount++;
+ }
+ }
+
+ private long totalCount() {
+ return minorCount + majorCount + criticalCount;
+ }
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
index 94e73a14c188c..cd4b6ffc7691f 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
@@ -19,23 +19,49 @@
package org.elasticsearch.client;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
+import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.protocol.xpack.ml.OpenJobRequest;
-import org.elasticsearch.protocol.xpack.ml.OpenJobResponse;
-import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
-import org.elasticsearch.protocol.xpack.ml.PutJobResponse;
-import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig;
-import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription;
-import org.elasticsearch.protocol.xpack.ml.job.config.Detector;
-import org.elasticsearch.protocol.xpack.ml.job.config.Job;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetJobStatsResponse;
+import org.elasticsearch.client.ml.job.config.JobState;
+import org.elasticsearch.client.ml.job.stats.JobStats;
+import org.elasticsearch.client.ml.CloseJobRequest;
+import org.elasticsearch.client.ml.CloseJobResponse;
+import org.elasticsearch.client.ml.DeleteJobRequest;
+import org.elasticsearch.client.ml.DeleteJobResponse;
+import org.elasticsearch.client.ml.GetJobRequest;
+import org.elasticsearch.client.ml.GetJobResponse;
+import org.elasticsearch.client.ml.OpenJobRequest;
+import org.elasticsearch.client.ml.OpenJobResponse;
+import org.elasticsearch.client.ml.PutJobRequest;
+import org.elasticsearch.client.ml.PutJobResponse;
+import org.elasticsearch.client.ml.job.config.AnalysisConfig;
+import org.elasticsearch.client.ml.job.config.DataDescription;
+import org.elasticsearch.client.ml.job.config.Detector;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.FlushJobResponse;
+import org.junit.After;
+import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.hasItems;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
public class MachineLearningIT extends ESRestHighLevelClientTestCase {
+ @After
+ public void cleanUp() throws IOException {
+ new MlRestTestStateCleaner(logger, client()).clearMlMetadata();
+ }
+
public void testPutJob() throws Exception {
String jobId = randomValidJobId();
Job job = buildJob(jobId);
@@ -48,6 +74,54 @@ public void testPutJob() throws Exception {
assertThat(createdJob.getJobType(), is(Job.ANOMALY_DETECTOR_JOB_TYPE));
}
+ public void testGetJob() throws Exception {
+ String jobId1 = randomValidJobId();
+ String jobId2 = randomValidJobId();
+
+ Job job1 = buildJob(jobId1);
+ Job job2 = buildJob(jobId2);
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ machineLearningClient.putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
+ machineLearningClient.putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);
+
+ GetJobRequest request = new GetJobRequest(jobId1, jobId2);
+
+ // Test getting specific jobs
+ GetJobResponse response = execute(request, machineLearningClient::getJob, machineLearningClient::getJobAsync);
+
+ assertEquals(2, response.count());
+ assertThat(response.jobs(), hasSize(2));
+ assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), containsInAnyOrder(jobId1, jobId2));
+
+ // Test getting all jobs explicitly
+ request = GetJobRequest.getAllJobsRequest();
+ response = execute(request, machineLearningClient::getJob, machineLearningClient::getJobAsync);
+
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobs().size() >= 2L);
+ assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test getting all jobs implicitly
+ response = execute(new GetJobRequest(), machineLearningClient::getJob, machineLearningClient::getJobAsync);
+
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobs().size() >= 2L);
+ assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+ }
+
+ public void testDeleteJob() throws Exception {
+ String jobId = randomValidJobId();
+ Job job = buildJob(jobId);
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+
+ DeleteJobResponse response = execute(new DeleteJobRequest(jobId),
+ machineLearningClient::deleteJob,
+ machineLearningClient::deleteJobAsync);
+
+ assertTrue(response.isAcknowledged());
+ }
+
public void testOpenJob() throws Exception {
String jobId = randomValidJobId();
Job job = buildJob(jobId);
@@ -60,6 +134,90 @@ public void testOpenJob() throws Exception {
assertTrue(response.isOpened());
}
+ public void testCloseJob() throws Exception {
+ String jobId = randomValidJobId();
+ Job job = buildJob(jobId);
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+ machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT);
+
+ CloseJobResponse response = execute(new CloseJobRequest(jobId),
+ machineLearningClient::closeJob,
+ machineLearningClient::closeJobAsync);
+ assertTrue(response.isClosed());
+ }
+
+ public void testFlushJob() throws Exception {
+ String jobId = randomValidJobId();
+ Job job = buildJob(jobId);
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+ machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT);
+
+ FlushJobResponse response = execute(new FlushJobRequest(jobId),
+ machineLearningClient::flushJob,
+ machineLearningClient::flushJobAsync);
+ assertTrue(response.isFlushed());
+ }
+
+ public void testGetJobStats() throws Exception {
+ String jobId1 = "ml-get-job-stats-test-id-1";
+ String jobId2 = "ml-get-job-stats-test-id-2";
+
+ Job job1 = buildJob(jobId1);
+ Job job2 = buildJob(jobId2);
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ machineLearningClient.putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
+ machineLearningClient.putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);
+
+ machineLearningClient.openJob(new OpenJobRequest(jobId1), RequestOptions.DEFAULT);
+
+ GetJobStatsRequest request = new GetJobStatsRequest(jobId1, jobId2);
+
+ // Test getting specific
+ GetJobStatsResponse response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+ assertEquals(2, response.count());
+ assertThat(response.jobStats(), hasSize(2));
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), containsInAnyOrder(jobId1, jobId2));
+ for (JobStats stats : response.jobStats()) {
+ if (stats.getJobId().equals(jobId1)) {
+ assertEquals(JobState.OPENED, stats.getState());
+ } else {
+ assertEquals(JobState.CLOSED, stats.getState());
+ }
+ }
+
+ // Test getting all explicitly
+ request = GetJobStatsRequest.getAllJobStatsRequest();
+ response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobStats().size() >= 2L);
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test getting all implicitly
+ response = execute(new GetJobStatsRequest(), machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobStats().size() >= 2L);
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test getting all with wildcard
+ request = new GetJobStatsRequest("ml-get-job-stats-test-id-*");
+ response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobStats().size() >= 2L);
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test when allow_no_jobs is false
+ final GetJobStatsRequest erroredRequest = new GetJobStatsRequest("jobs-that-do-not-exist*");
+ erroredRequest.setAllowNoJobs(false);
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
+ () -> execute(erroredRequest, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync));
+ assertThat(exception.status().getStatus(), equalTo(404));
+ }
+
public static String randomValidJobId() {
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray());
return generator.ofCodePointsLength(random(), 10, 10);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java
new file mode 100644
index 0000000000000..7ad86576245ef
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.test.rest.ESRestTestCase;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This is temporarily duplicated from the server side.
+ * TODO: Replace with an implementation using the HLRC once
+ * the APIs for managing datafeeds are implemented.
+ */
+public class MlRestTestStateCleaner {
+
+ private final Logger logger;
+ private final RestClient adminClient;
+
+ public MlRestTestStateCleaner(Logger logger, RestClient adminClient) {
+ this.logger = logger;
+ this.adminClient = adminClient;
+ }
+
+ public void clearMlMetadata() throws IOException {
+ deleteAllDatafeeds();
+ deleteAllJobs();
+ // indices will be deleted by the ESRestTestCase class
+ }
+
+ @SuppressWarnings("unchecked")
+ private void deleteAllDatafeeds() throws IOException {
+ final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds");
+ datafeedsRequest.addParameter("filter_path", "datafeeds");
+ final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest);
+ final List