
Merge branch '6.x' into ccr-6.x
* 6.x:
  HLRC: ML Flush job (#33187)
  Switch more LLREST usage to new style Requests (#33171)
  HLRC: Adding ML Job stats (#33183)
  HLREST: add reindex API (#32679)
  Mute testSyncerOnClosingShard
  [DOCS] Moves machine learning APIs to docs folder (#31118)
dnhatn committed Sep 2, 2018
2 parents 64695b6 + 549ccf9 commit 3cbae0a
Showing 116 changed files with 4,395 additions and 322 deletions.
@@ -28,10 +28,12 @@
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.client.ml.FlushJobRequest;

import java.io.IOException;

@@ -126,6 +128,36 @@ static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOExceptio
return request;
}

static Request flushJob(FlushJobRequest flushJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(flushJobRequest.getJobId())
.addPathPartAsIs("_flush")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setEntity(createEntity(flushJobRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
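
For illustration only (not part of the diff): a sketch of the Request the flushJob converter above should produce for a hypothetical job id "my-job", inferred from the builder calls shown:

FlushJobRequest flushJobRequest = new FlushJobRequest("my-job");   // hypothetical job id
Request request = MLRequestConverters.flushJob(flushJobRequest);
// expected method: HttpPost.METHOD_NAME ("POST")
// expected endpoint: roughly "/_xpack/ml/anomaly_detectors/my-job/_flush"
// the FlushJobRequest itself is serialized into the request body as JSON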

static Request getJobStats(GetJobStatsRequest getJobStatsRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds()))
.addPathPartAsIs("_stats")
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);

RequestConverters.Params params = new RequestConverters.Params(request);
if (getJobStatsRequest.isAllowNoJobs() != null) {
params.putParam("allow_no_jobs", Boolean.toString(getJobStatsRequest.isAllowNoJobs()));
}
return request;
}
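
Likewise, a hedged sketch of the stats converter's output for hypothetical job ids, following the comma-delimited id handling above:

GetJobStatsRequest getJobStatsRequest = new GetJobStatsRequest("job-1", "job-2");   // hypothetical job ids
Request request = MLRequestConverters.getJobStats(getJobStatsRequest);
// expected method: HttpGet.METHOD_NAME ("GET")
// expected endpoint: roughly "/_xpack/ml/anomaly_detectors/job-1,job-2/_stats"
// "allow_no_jobs" is appended as a query parameter only when it has been set on the request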

static Request getRecords(GetRecordsRequest getRecordsRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
@@ -19,6 +19,11 @@
package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteJobRequest;
@@ -288,6 +293,101 @@ public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, A
Collections.emptySet());
}

/**
* Flushes internally buffered data for the given Machine Learning Job, ensuring all data sent to the job has been processed.
* This may cause new results to be calculated, depending on the contents of the buffer.
*
* Flush and close operations are similar, but a flush is more efficient when you expect
* to send more data for analysis.
*
* When flushing, the job remains open and is available to continue analyzing data.
* A close operation additionally prunes and persists the model state to disk and the
* job must be opened again before analyzing further data.
*
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html">Flush ML job documentation</a>
*
* @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
*/
public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::flushJob,
options,
FlushJobResponse::fromXContent,
Collections.emptySet());
}
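
As a usage sketch (not part of the commit), assuming an initialized RestHighLevelClient named client and an already opened job with the hypothetical id "my-job":

FlushJobRequest flushRequest = new FlushJobRequest("my-job");        // hypothetical job id
flushRequest.setCalcInterim(true);                                   // also calculate interim results for the flushed data
FlushJobResponse flushResponse = client.machineLearning().flushJob(flushRequest, RequestOptions.DEFAULT);
boolean flushed = flushResponse.isFlushed();                         // true when the flush was acknowledged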

/**
* Flushes internally buffered data for the given Machine Learning Job asynchronously, ensuring all data sent to the job has been processed.
* This may cause new results to be calculated, depending on the contents of the buffer.
*
* Flush and close operations are similar, but a flush is more efficient when you expect
* to send more data for analysis.
*
* When flushing, the job remains open and is available to continue analyzing data.
* A close operation additionally prunes and persists the model state to disk and the
* job must be opened again before analyzing further data.
*
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html">Flush ML job documentation</a>
*
* @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener<FlushJobResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::flushJob,
options,
FlushJobResponse::fromXContent,
listener,
Collections.emptySet());
}

/**
* Gets usage statistics for one or more Machine Learning jobs.
*
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get Job stats docs</a>
* </p>
* @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return {@link GetJobStatsResponse} response object containing
* the {@link JobStats} objects and the number of jobs found
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::getJobStats,
options,
GetJobStatsResponse::fromXContent,
Collections.emptySet());
}
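
A corresponding usage sketch for the blocking stats call (illustrative only), again assuming a client instance and a hypothetical job id:

GetJobStatsRequest statsRequest = new GetJobStatsRequest("my-job");  // hypothetical job id
statsRequest.setAllowNoJobs(true);                                   // don't fail when the id expression matches no jobs
GetJobStatsResponse statsResponse = client.machineLearning().getJobStats(statsRequest, RequestOptions.DEFAULT);
long count = statsResponse.count();                                  // number of jobs found
List<JobStats> jobStats = statsResponse.jobStats();                  // per-job statistics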

/**
* Gets usage statistics for one or more Machine Learning jobs, asynchronously.
*
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html">Get Job stats docs</a>
* </p>
* @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion
*/
public void getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, ActionListener<GetJobStatsResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::getJobStats,
options,
GetJobStatsResponse::fromXContent,
listener,
Collections.emptySet());
}
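
The asynchronous variants, including flushJobAsync above, follow the usual high-level client listener pattern; a minimal sketch reusing the hypothetical statsRequest from the previous example:

client.machineLearning().getJobStatsAsync(statsRequest, RequestOptions.DEFAULT, new ActionListener<GetJobStatsResponse>() {
    @Override
    public void onResponse(GetJobStatsResponse response) {
        // handle the returned JobStats once the request completes
    }

    @Override
    public void onFailure(Exception e) {
        // handle connection, serialization, or server-side failures
    }
});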

/**
* Gets the records for a Machine Learning Job.
* <p>
@@ -107,6 +107,7 @@
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
import org.elasticsearch.protocol.xpack.XPackUsageRequest;
import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
@@ -832,6 +833,21 @@ static Request clusterHealth(ClusterHealthRequest healthRequest) {
return request;
}

static Request reindex(ReindexRequest reindexRequest) throws IOException {
String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params(request)
.withRefresh(reindexRequest.isRefresh())
.withTimeout(reindexRequest.getTimeout())
.withWaitForActiveShards(reindexRequest.getWaitForActiveShards(), ActiveShardCount.DEFAULT);

if (reindexRequest.getScrollTime() != null) {
params.putParam("scroll", reindexRequest.getScrollTime());
}
request.setEntity(createEntity(reindexRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
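
For illustration only: a sketch of the low-level Request this converter should build for hypothetical index names; only parameters actually set on the ReindexRequest end up in the query string:

ReindexRequest reindexRequest = new ReindexRequest();
reindexRequest.setSourceIndices("source-index");                     // hypothetical source index
reindexRequest.setDestIndex("dest-index");                           // hypothetical destination index
Request request = RequestConverters.reindex(reindexRequest);
// expected method: HttpPost.METHOD_NAME ("POST"), endpoint "/_reindex"
// refresh, timeout, wait_for_active_shards and scroll become query parameters when set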

static Request rollover(RolloverRequest rolloverRequest) throws IOException {
String endpoint = new EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover")
.addPathPart(rolloverRequest.getNewIndexName()).build();
@@ -1140,10 +1156,10 @@ static Request xPackInfo(XPackInfoRequest infoRequest) {
static Request xPackGraphExplore(GraphExploreRequest exploreRequest) throws IOException {
String endpoint = endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore");
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
request.setEntity(createEntity(exploreRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}

static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
@@ -65,6 +65,8 @@
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.index.rankeval.RankEvalResponse;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestStatus;
@@ -323,7 +325,7 @@ public final XPackClient xpack() {
* Watcher APIs on elastic.co</a> for more information.
*/
public WatcherClient watcher() { return watcherClient; }

/**
* Provides methods for accessing the Elastic Licensed Graph explore API that
* is shipped with the default distribution of Elasticsearch. All of
@@ -332,7 +334,7 @@ public final XPackClient xpack() {
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html">
* Graph API on elastic.co</a> for more information.
*/
public GraphClient graph() { return graphClient; }

/**
* Provides methods for accessing the Elastic Licensed Licensing APIs that
@@ -415,6 +417,33 @@ public final void bulkAsync(BulkRequest bulkRequest, ActionListener<BulkResponse
performRequestAsyncAndParseEntity(bulkRequest, RequestConverters::bulk, BulkResponse::fromXContent, listener, emptySet(), headers);
}

/**
* Executes a reindex request.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html">Reindex API on elastic.co</a>
* @param reindexRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public final BulkByScrollResponse reindex(ReindexRequest reindexRequest, RequestOptions options) throws IOException {
return performRequestAndParseEntity(
reindexRequest, RequestConverters::reindex, options, BulkByScrollResponse::fromXContent, emptySet()
);
}
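
A usage sketch for the blocking reindex call (not part of the commit), assuming an initialized RestHighLevelClient named client and hypothetical index names:

ReindexRequest reindexRequest = new ReindexRequest();
reindexRequest.setSourceIndices("source-index");                     // hypothetical source index
reindexRequest.setDestIndex("dest-index");                           // hypothetical destination index
reindexRequest.setRefresh(true);                                     // refresh the destination when the copy finishes
BulkByScrollResponse response = client.reindex(reindexRequest, RequestOptions.DEFAULT);
long created = response.getCreated();                                // documents created in the destination index
long updated = response.getUpdated();                                // documents that overwrote existing ones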

/**
* Asynchronously executes a reindex request.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html">Reindex API on elastic.co</a>
* @param reindexRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public final void reindexAsync(ReindexRequest reindexRequest, RequestOptions options, ActionListener<BulkByScrollResponse> listener) {
performRequestAsyncAndParseEntity(
reindexRequest, RequestConverters::reindex, options, BulkByScrollResponse::fromXContent, listener, emptySet()
);
}

/**
* Pings the remote Elasticsearch cluster and returns true if the ping succeeded, false otherwise
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized