From 9ab1325953b30fb3d881e9c27b2afdd3274322b6 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 20 Jun 2018 16:34:56 -0600 Subject: [PATCH 01/34] Introduce http and tcp server channels (#31446) Historically in TcpTransport server channels were represented by the same channel interface as socket channels. This was necessary as TcpTransport was parameterized by the channel type. This commit introduces TcpServerChannel and HttpServerChannel classes. Additionally, it adds the implementations for the various transports. This allows server channels to have unique functionality and not implement the methods they do not support (such as send and getRemoteAddress). Additionally, with the introduction of HttpServerChannel this commit extracts some of the storing and closing channel work to the abstract http server transport. --- .../http/netty4/Netty4HttpRequestHandler.java | 11 +- .../http/netty4/Netty4HttpServerChannel.java | 76 ++++++++++ .../netty4/Netty4HttpServerTransport.java | 116 +++++---------- .../netty4/Netty4MessageChannelHandler.java | 18 ++- ...yTcpChannel.java => Netty4TcpChannel.java} | 6 +- .../netty4/Netty4TcpServerChannel.java | 84 +++++++++++ .../transport/netty4/Netty4Transport.java | 45 +++--- .../Netty4SizeHeaderFrameDecoderTests.java | 2 +- .../http/nio/NioHttpServerChannel.java | 44 ++++++ .../http/nio/NioHttpServerTransport.java | 134 +++--------------- .../transport/nio/NioTcpServerChannel.java | 23 +-- .../transport/nio/NioTransport.java | 4 +- .../http/AbstractHttpServerTransport.java | 92 ++++++++++-- .../elasticsearch/http/HttpServerChannel.java | 34 +++++ .../transport/TcpServerChannel.java | 46 ++++++ .../elasticsearch/transport/TcpTransport.java | 27 ++-- .../AbstractHttpServerTransportTests.java | 13 +- .../transport/TcpTransportTests.java | 6 +- .../transport/MockTcpTransport.java | 2 +- .../transport/nio/MockNioTransport.java | 18 +-- .../netty4/SecurityNetty4Transport.java | 2 +- .../transport/ServerTransportFilter.java | 6 +- .../transport/nio/SecurityNioTransport.java | 6 +- 23 files changed, 501 insertions(+), 314 deletions(-) create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerChannel.java rename modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/{NettyTcpChannel.java => Netty4TcpChannel.java} (96%) create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpServerChannel.java create mode 100644 plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java create mode 100644 server/src/main/java/org/elasticsearch/http/HttpServerChannel.java create mode 100644 server/src/main/java/org/elasticsearch/transport/TcpServerChannel.java diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java index 124bd607ab7ae..ab078ad10d337 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java @@ -29,8 +29,6 @@ import org.elasticsearch.http.HttpPipelinedRequest; import org.elasticsearch.transport.netty4.Netty4Utils; -import static org.elasticsearch.http.netty4.Netty4HttpServerTransport.HTTP_CHANNEL_KEY; - @ChannelHandler.Sharable class Netty4HttpRequestHandler extends SimpleChannelInboundHandler> { @@ -42,7 +40,7 @@ class 
Netty4HttpRequestHandler extends SimpleChannelInboundHandler msg) throws Exception { - Netty4HttpChannel channel = ctx.channel().attr(HTTP_CHANNEL_KEY).get(); + Netty4HttpChannel channel = ctx.channel().attr(Netty4HttpServerTransport.HTTP_CHANNEL_KEY).get(); FullHttpRequest request = msg.getRequest(); try { @@ -77,12 +75,11 @@ protected void channelRead0(ChannelHandlerContext ctx, HttpPipelinedRequest closeContext = new CompletableContext<>(); + + Netty4HttpServerChannel(Channel channel) { + this.channel = channel; + this.channel.closeFuture().addListener(f -> { + if (f.isSuccess()) { + closeContext.complete(null); + } else { + Throwable cause = f.cause(); + if (cause instanceof Error) { + Netty4Utils.maybeDie(cause); + closeContext.completeExceptionally(new Exception(cause)); + } else { + closeContext.completeExceptionally((Exception) cause); + } + } + }); + } + + @Override + public InetSocketAddress getLocalAddress() { + return (InetSocketAddress) channel.localAddress(); + } + + @Override + public void addCloseListener(ActionListener listener) { + closeContext.addListener(ActionListener.toBiConsumer(listener)); + } + + @Override + public boolean isOpen() { + return channel.isOpen(); + } + + @Override + public void close() { + channel.close(); + } + + @Override + public String toString() { + return "Netty4HttpChannel{localAddress=" + getLocalAddress() + "}"; + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 8a49ce38b89bc..34f00c0684040 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -23,6 +23,7 @@ import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerAdapter; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; @@ -42,22 +43,19 @@ import io.netty.util.AttributeKey; import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.CloseableChannel; -import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.AbstractHttpServerTransport; -import org.elasticsearch.http.BindHttpException; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpHandlingSettings; -import org.elasticsearch.http.HttpStats; +import org.elasticsearch.http.HttpServerChannel; import org.elasticsearch.http.netty4.cors.Netty4CorsConfig; import org.elasticsearch.http.netty4.cors.Netty4CorsConfigBuilder; import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; @@ -65,14 +63,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.netty4.Netty4Utils; -import 
java.io.IOException; -import java.net.InetAddress; import java.net.InetSocketAddress; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Pattern; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; @@ -154,12 +147,6 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { private final int pipeliningMaxEvents; - private final boolean tcpNoDelay; - private final boolean tcpKeepAlive; - private final boolean reuseAddress; - - private final ByteSizeValue tcpSendBufferSize; - private final ByteSizeValue tcpReceiveBufferSize; private final RecvByteBufAllocator recvByteBufAllocator; private final int readTimeoutMillis; @@ -167,8 +154,6 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { protected volatile ServerBootstrap serverBootstrap; - protected final List serverChannels = new ArrayList<>(); - private final Netty4CorsConfig corsConfig; public Netty4HttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, @@ -184,11 +169,6 @@ public Netty4HttpServerTransport(Settings settings, NetworkService networkServic this.maxCompositeBufferComponents = SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS.get(settings); this.workerCount = SETTING_HTTP_WORKER_COUNT.get(settings); - this.tcpNoDelay = SETTING_HTTP_TCP_NO_DELAY.get(settings); - this.tcpKeepAlive = SETTING_HTTP_TCP_KEEP_ALIVE.get(settings); - this.reuseAddress = SETTING_HTTP_TCP_REUSE_ADDRESS.get(settings); - this.tcpSendBufferSize = SETTING_HTTP_TCP_SEND_BUFFER_SIZE.get(settings); - this.tcpReceiveBufferSize = SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE.get(settings); this.readTimeoutMillis = Math.toIntExact(SETTING_HTTP_READ_TIMEOUT.get(settings).getMillis()); ByteSizeValue receivePredictor = SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE.get(settings); @@ -217,6 +197,7 @@ protected void doStart() { serverBootstrap.channel(NioServerSocketChannel.class); serverBootstrap.childHandler(configureServerChannelHandler()); + serverBootstrap.handler(new ServerChannelExceptionHandler(this)); serverBootstrap.childOption(ChannelOption.TCP_NODELAY, SETTING_HTTP_TCP_NO_DELAY.get(settings)); serverBootstrap.childOption(ChannelOption.SO_KEEPALIVE, SETTING_HTTP_TCP_KEEP_ALIVE.get(settings)); @@ -238,10 +219,7 @@ protected void doStart() { serverBootstrap.option(ChannelOption.SO_REUSEADDR, reuseAddress); serverBootstrap.childOption(ChannelOption.SO_REUSEADDR, reuseAddress); - this.boundAddress = createBoundHttpAddress(); - if (logger.isInfoEnabled()) { - logger.info("{}", boundAddress); - } + bindServer(); success = true; } finally { if (success == false) { @@ -284,78 +262,29 @@ static Netty4CorsConfig buildCorsConfig(Settings settings) { } @Override - protected TransportAddress bindAddress(final InetAddress hostAddress) { - final AtomicReference lastException = new AtomicReference<>(); - final AtomicReference boundSocket = new AtomicReference<>(); - boolean success = port.iterate(portNumber -> { - try { - synchronized (serverChannels) { - ChannelFuture future = serverBootstrap.bind(new InetSocketAddress(hostAddress, portNumber)).sync(); - serverChannels.add(future.channel()); - boundSocket.set((InetSocketAddress) future.channel().localAddress()); - } - } catch (Exception e) { - lastException.set(e); - return false; - } - return true; - }); - if (!success) { - throw new 
BindHttpException("Failed to bind to [" + port.getPortRangeString() + "]", lastException.get()); - } - - if (logger.isDebugEnabled()) { - logger.debug("Bound http to address {{}}", NetworkAddress.format(boundSocket.get())); - } - return new TransportAddress(boundSocket.get()); + protected HttpServerChannel bind(InetSocketAddress socketAddress) throws Exception { + ChannelFuture future = serverBootstrap.bind(socketAddress).sync(); + Channel channel = future.channel(); + Netty4HttpServerChannel httpServerChannel = new Netty4HttpServerChannel(channel); + channel.attr(HTTP_SERVER_CHANNEL_KEY).set(httpServerChannel); + return httpServerChannel; } @Override - protected void doStop() { - synchronized (serverChannels) { - if (!serverChannels.isEmpty()) { - try { - Netty4Utils.closeChannels(serverChannels); - } catch (IOException e) { - logger.trace("exception while closing channels", e); - } finally { - serverChannels.clear(); - } - } - } - - // TODO: Move all of channel closing to abstract class once server channels are handled - try { - CloseableChannel.closeChannels(new ArrayList<>(httpChannels), true); - } catch (Exception e) { - logger.warn("unexpected exception while closing http channels", e); - } - httpChannels.clear(); - - - + protected void stopInternal() { if (serverBootstrap != null) { serverBootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS).awaitUninterruptibly(); serverBootstrap = null; } } - @Override - protected void doClose() { - } - - @Override - public HttpStats stats() { - return new HttpStats(httpChannels.size(), totalChannelsAccepted.get()); - } - @Override protected void onException(HttpChannel channel, Exception cause) { if (cause instanceof ReadTimeoutException) { if (logger.isTraceEnabled()) { logger.trace("Http read timeout {}", channel); } - CloseableChannel.closeChannel(channel);; + CloseableChannel.closeChannel(channel); } else { super.onException(channel, cause); } @@ -366,6 +295,7 @@ public ChannelHandler configureServerChannelHandler() { } static final AttributeKey HTTP_CHANNEL_KEY = AttributeKey.newInstance("es-http-channel"); + static final AttributeKey HTTP_SERVER_CHANNEL_KEY = AttributeKey.newInstance("es-http-server-channel"); protected static class HttpChannelHandler extends ChannelInitializer { @@ -413,4 +343,24 @@ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws E } } + @ChannelHandler.Sharable + private static class ServerChannelExceptionHandler extends ChannelHandlerAdapter { + + private final Netty4HttpServerTransport transport; + + private ServerChannelExceptionHandler(Netty4HttpServerTransport transport) { + this.transport = transport; + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { + Netty4Utils.maybeDie(cause); + Netty4HttpServerChannel httpServerChannel = ctx.channel().attr(HTTP_SERVER_CHANNEL_KEY).get(); + if (cause instanceof Error) { + transport.onServerException(httpServerChannel, new Exception(cause)); + } else { + transport.onServerException(httpServerChannel, (Exception) cause); + } + } + } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java index 58440ae96e07a..698c86d048c1c 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java +++ 
b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java @@ -24,6 +24,8 @@ import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.util.Attribute; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.transport.TcpHeader; import org.elasticsearch.transport.Transports; @@ -36,11 +38,9 @@ final class Netty4MessageChannelHandler extends ChannelDuplexHandler { private final Netty4Transport transport; - private final String profileName; - Netty4MessageChannelHandler(Netty4Transport transport, String profileName) { + Netty4MessageChannelHandler(Netty4Transport transport) { this.transport = transport; - this.profileName = profileName; } @Override @@ -58,7 +58,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception // netty always copies a buffer, either in NioWorker in its read handler, where it copies to a fresh // buffer, or in the cumulative buffer, which is cleaned each time so it could be bigger than the actual size BytesReference reference = Netty4Utils.toBytesReference(buffer, remainingMessageSize); - Attribute channelAttribute = channel.attr(Netty4Transport.CHANNEL_KEY); + Attribute channelAttribute = channel.attr(Netty4Transport.CHANNEL_KEY); transport.messageReceived(reference, channelAttribute.get()); } finally { // Set the expected position of the buffer, no matter what happened @@ -69,7 +69,13 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { Netty4Utils.maybeDie(cause); - transport.exceptionCaught(ctx, cause); + final Throwable unwrapped = ExceptionsHelper.unwrap(cause, ElasticsearchException.class); + final Throwable newCause = unwrapped != null ? 
unwrapped : cause; + Netty4TcpChannel tcpChannel = ctx.channel().attr(Netty4Transport.CHANNEL_KEY).get(); + if (newCause instanceof Error) { + transport.onException(tcpChannel, new Exception(newCause)); + } else { + transport.onException(tcpChannel, (Exception) newCause); + } } - } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyTcpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java similarity index 96% rename from modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyTcpChannel.java rename to modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java index 89fabdcd763d1..78a1425500072 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyTcpChannel.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java @@ -30,13 +30,13 @@ import java.net.InetSocketAddress; -public class NettyTcpChannel implements TcpChannel { +public class Netty4TcpChannel implements TcpChannel { private final Channel channel; private final String profile; private final CompletableContext closeContext = new CompletableContext<>(); - NettyTcpChannel(Channel channel, String profile) { + Netty4TcpChannel(Channel channel, String profile) { this.channel = channel; this.profile = profile; this.channel.closeFuture().addListener(f -> { @@ -118,7 +118,7 @@ public Channel getLowLevelChannel() { @Override public String toString() { - return "NettyTcpChannel{" + + return "Netty4TcpChannel{" + "localAddress=" + getLocalAddress() + ", remoteAddress=" + channel.remoteAddress() + '}'; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpServerChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpServerChannel.java new file mode 100644 index 0000000000000..873a6c33fba11 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpServerChannel.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport.netty4; + +import io.netty.channel.Channel; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.concurrent.CompletableContext; +import org.elasticsearch.transport.TcpServerChannel; + +import java.net.InetSocketAddress; + +public class Netty4TcpServerChannel implements TcpServerChannel { + + private final Channel channel; + private final String profile; + private final CompletableContext closeContext = new CompletableContext<>(); + + Netty4TcpServerChannel(Channel channel, String profile) { + this.channel = channel; + this.profile = profile; + this.channel.closeFuture().addListener(f -> { + if (f.isSuccess()) { + closeContext.complete(null); + } else { + Throwable cause = f.cause(); + if (cause instanceof Error) { + Netty4Utils.maybeDie(cause); + closeContext.completeExceptionally(new Exception(cause)); + } else { + closeContext.completeExceptionally((Exception) cause); + } + } + }); + } + + @Override + public String getProfile() { + return profile; + } + + @Override + public InetSocketAddress getLocalAddress() { + return (InetSocketAddress) channel.localAddress(); + } + + @Override + public void close() { + channel.close(); + } + + @Override + public void addCloseListener(ActionListener listener) { + closeContext.addListener(ActionListener.toBiConsumer(listener)); + } + + @Override + public boolean isOpen() { + return channel.isOpen(); + } + + @Override + public String toString() { + return "Netty4TcpChannel{" + + "localAddress=" + getLocalAddress() + + '}'; + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index 466c4b68bfa4e..c8c6fceb54304 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -25,6 +25,7 @@ import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerAdapter; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; @@ -37,8 +38,6 @@ import io.netty.util.concurrent.Future; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; @@ -196,6 +195,7 @@ private void createServerBootstrap(ProfileSettings profileSettings) { serverBootstrap.channel(NioServerSocketChannel.class); serverBootstrap.childHandler(getServerChannelInitializer(name)); + serverBootstrap.handler(new ServerChannelExceptionHandler()); serverBootstrap.childOption(ChannelOption.TCP_NODELAY, profileSettings.tcpNoDelay); serverBootstrap.childOption(ChannelOption.SO_KEEPALIVE, profileSettings.tcpKeepAlive); @@ -226,17 +226,11 @@ protected ChannelHandler getClientChannelInitializer() { return new ClientChannelInitializer(); } - static final AttributeKey CHANNEL_KEY = AttributeKey.newInstance("es-channel"); - - protected final void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { - final Throwable unwrapped = ExceptionsHelper.unwrap(cause, 
ElasticsearchException.class); - final Throwable t = unwrapped != null ? unwrapped : cause; - Channel channel = ctx.channel(); - onException(channel.attr(CHANNEL_KEY).get(), t instanceof Exception ? (Exception) t : new ElasticsearchException(t)); - } + static final AttributeKey CHANNEL_KEY = AttributeKey.newInstance("es-channel"); + static final AttributeKey SERVER_CHANNEL_KEY = AttributeKey.newInstance("es-server-channel"); @Override - protected NettyTcpChannel initiateChannel(InetSocketAddress address, ActionListener listener) throws IOException { + protected Netty4TcpChannel initiateChannel(InetSocketAddress address, ActionListener listener) throws IOException { ChannelFuture channelFuture = bootstrap.connect(address); Channel channel = channelFuture.channel(); if (channel == null) { @@ -245,7 +239,7 @@ protected NettyTcpChannel initiateChannel(InetSocketAddress address, ActionListe } addClosedExceptionLogger(channel); - NettyTcpChannel nettyChannel = new NettyTcpChannel(channel, "default"); + Netty4TcpChannel nettyChannel = new Netty4TcpChannel(channel, "default"); channel.attr(CHANNEL_KEY).set(nettyChannel); channelFuture.addListener(f -> { @@ -266,10 +260,10 @@ protected NettyTcpChannel initiateChannel(InetSocketAddress address, ActionListe } @Override - protected NettyTcpChannel bind(String name, InetSocketAddress address) { + protected Netty4TcpServerChannel bind(String name, InetSocketAddress address) { Channel channel = serverBootstraps.get(name).bind(address).syncUninterruptibly().channel(); - NettyTcpChannel esChannel = new NettyTcpChannel(channel, name); - channel.attr(CHANNEL_KEY).set(esChannel); + Netty4TcpServerChannel esChannel = new Netty4TcpServerChannel(channel, name); + channel.attr(SERVER_CHANNEL_KEY).set(esChannel); return esChannel; } @@ -310,7 +304,7 @@ protected void initChannel(Channel ch) throws Exception { ch.pipeline().addLast("logging", new ESLoggingHandler()); ch.pipeline().addLast("size", new Netty4SizeHeaderFrameDecoder()); // using a dot as a prefix means this cannot come from any settings parsed - ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this, ".client")); + ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this)); } @Override @@ -331,11 +325,11 @@ protected ServerChannelInitializer(String name) { @Override protected void initChannel(Channel ch) throws Exception { addClosedExceptionLogger(ch); - NettyTcpChannel nettyTcpChannel = new NettyTcpChannel(ch, name); + Netty4TcpChannel nettyTcpChannel = new Netty4TcpChannel(ch, name); ch.attr(CHANNEL_KEY).set(nettyTcpChannel); ch.pipeline().addLast("logging", new ESLoggingHandler()); ch.pipeline().addLast("size", new Netty4SizeHeaderFrameDecoder()); - ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this, name)); + ch.pipeline().addLast("dispatcher", new Netty4MessageChannelHandler(Netty4Transport.this)); serverAcceptedChannel(nettyTcpChannel); } @@ -353,4 +347,19 @@ private void addClosedExceptionLogger(Channel channel) { } }); } + + @ChannelHandler.Sharable + private class ServerChannelExceptionHandler extends ChannelHandlerAdapter { + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { + Netty4Utils.maybeDie(cause); + Netty4TcpServerChannel serverChannel = ctx.channel().attr(SERVER_CHANNEL_KEY).get(); + if (cause instanceof Error) { + onServerException(serverChannel, new Exception(cause)); + } else { + onServerException(serverChannel, (Exception) cause); + } + } + 
} } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java index 7343da6c3b11a..4c783cf078769 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java @@ -70,7 +70,7 @@ public void startThreadPool() { nettyTransport.start(); TransportAddress[] boundAddresses = nettyTransport.boundAddress().boundAddresses(); - TransportAddress transportAddress = (TransportAddress) randomFrom(boundAddresses); + TransportAddress transportAddress = randomFrom(boundAddresses); port = transportAddress.address().getPort(); host = transportAddress.address().getAddress(); } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java new file mode 100644 index 0000000000000..2674d38dc490e --- /dev/null +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.nio; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.http.HttpServerChannel; +import org.elasticsearch.nio.NioServerSocketChannel; + +import java.io.IOException; +import java.nio.channels.ServerSocketChannel; + +public class NioHttpServerChannel extends NioServerSocketChannel implements HttpServerChannel { + + NioHttpServerChannel(ServerSocketChannel serverSocketChannel) throws IOException { + super(serverSocketChannel); + } + + @Override + public void addCloseListener(ActionListener listener) { + addCloseListener(ActionListener.toBiConsumer(listener)); + } + + @Override + public String toString() { + return "NioHttpServerChannel{localAddress=" + getLocalAddress() + "}"; + } +} diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java index aa0859e6146f2..b80778e964293 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java @@ -21,40 +21,29 @@ import io.netty.handler.codec.http.HttpMethod; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.network.CloseableChannel; -import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.AbstractHttpServerTransport; -import org.elasticsearch.http.BindHttpException; import org.elasticsearch.http.HttpChannel; +import org.elasticsearch.http.HttpServerChannel; import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.http.HttpStats; import org.elasticsearch.http.nio.cors.NioCorsConfig; import org.elasticsearch.http.nio.cors.NioCorsConfigBuilder; import org.elasticsearch.nio.BytesChannelContext; import org.elasticsearch.nio.ChannelFactory; import org.elasticsearch.nio.EventHandler; import org.elasticsearch.nio.InboundChannelBuffer; -import org.elasticsearch.nio.NioChannel; import org.elasticsearch.nio.NioGroup; import org.elasticsearch.nio.NioSelector; -import org.elasticsearch.nio.NioServerSocketChannel; import org.elasticsearch.nio.NioSocketChannel; import org.elasticsearch.nio.ServerChannelContext; import org.elasticsearch.nio.SocketChannelContext; @@ -62,18 +51,11 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; -import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; -import java.util.ArrayList; import 
java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.regex.Pattern; @@ -113,7 +95,6 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { private final int tcpSendBufferSize; private final int tcpReceiveBufferSize; - private final Set serverChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); private NioGroup nioGroup; private HttpChannelFactory channelFactory; private final NioCorsConfig corsConfig; @@ -156,12 +137,7 @@ protected void doStart() { daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX), workerCount, (s) -> new EventHandler(this::onNonChannelException, s)); channelFactory = new HttpChannelFactory(); - this.boundAddress = createBoundHttpAddress(); - - if (logger.isInfoEnabled()) { - logger.info("{}", boundAddress); - } - + bindServer(); success = true; } catch (IOException e) { throw new ElasticsearchException(e); @@ -173,26 +149,7 @@ protected void doStart() { } @Override - protected void doStop() { - synchronized (serverChannels) { - if (serverChannels.isEmpty() == false) { - try { - closeChannels(new ArrayList<>(serverChannels)); - } catch (Exception e) { - logger.error("unexpected exception while closing http server channels", e); - } - serverChannels.clear(); - } - } - - // TODO: Move all of channel closing to abstract class once server channels are handled - try { - CloseableChannel.closeChannels(new ArrayList<>(httpChannels), true); - } catch (Exception e) { - logger.warn("unexpected exception while closing http channels", e); - } - httpChannels.clear(); - + protected void stopInternal() { try { nioGroup.close(); } catch (Exception e) { @@ -201,40 +158,8 @@ protected void doStop() { } @Override - protected void doClose() throws IOException { - } - - @Override - protected TransportAddress bindAddress(InetAddress hostAddress) { - final AtomicReference lastException = new AtomicReference<>(); - final AtomicReference boundSocket = new AtomicReference<>(); - boolean success = port.iterate(portNumber -> { - try { - synchronized (serverChannels) { - InetSocketAddress address = new InetSocketAddress(hostAddress, portNumber); - NioServerSocketChannel channel = nioGroup.bindServerChannel(address, channelFactory); - serverChannels.add(channel); - boundSocket.set(channel.getLocalAddress()); - } - } catch (Exception e) { - lastException.set(e); - return false; - } - return true; - }); - if (success == false) { - throw new BindHttpException("Failed to bind to [" + port.getPortRangeString() + "]", lastException.get()); - } - - if (logger.isDebugEnabled()) { - logger.debug("Bound http to address {{}}", NetworkAddress.format(boundSocket.get())); - } - return new TransportAddress(boundSocket.get()); - } - - @Override - public HttpStats stats() { - return new HttpStats(serverChannels.size(), totalChannelsAccepted.get()); + protected HttpServerChannel bind(InetSocketAddress socketAddress) throws IOException { + return nioGroup.bindServerChannel(socketAddress, channelFactory); } static NioCorsConfig buildCorsConfig(Settings settings) { @@ -269,33 +194,11 @@ static NioCorsConfig buildCorsConfig(Settings settings) { .build(); } - private void closeChannels(List channels) { - List> futures = new ArrayList<>(channels.size()); - - for (NioChannel channel : channels) { - PlainActionFuture future = PlainActionFuture.newFuture(); - 
channel.addCloseListener(ActionListener.toBiConsumer(future)); - futures.add(future); - channel.close(); - } - - List closeExceptions = new ArrayList<>(); - for (ActionFuture f : futures) { - try { - f.actionGet(); - } catch (RuntimeException e) { - closeExceptions.add(e); - } - } - - ExceptionsHelper.rethrowAndSuppress(closeExceptions); - } - private void acceptChannel(NioSocketChannel socketChannel) { super.serverAcceptedChannel((HttpChannel) socketChannel); } - private class HttpChannelFactory extends ChannelFactory { + private class HttpChannelFactory extends ChannelFactory { private HttpChannelFactory() { super(new RawChannelFactory(tcpNoDelay, tcpKeepAlive, reuseAddress, tcpSendBufferSize, tcpReceiveBufferSize)); @@ -303,29 +206,28 @@ private HttpChannelFactory() { @Override public NioHttpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { - NioHttpChannel nioChannel = new NioHttpChannel(channel); + NioHttpChannel httpChannel = new NioHttpChannel(channel); java.util.function.Supplier pageSupplier = () -> { Recycler.V bytes = pageCacheRecycler.bytePage(false); return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close); }; - HttpReadWriteHandler httpReadWritePipeline = new HttpReadWriteHandler(nioChannel,NioHttpServerTransport.this, + HttpReadWriteHandler httpReadWritePipeline = new HttpReadWriteHandler(httpChannel,NioHttpServerTransport.this, handlingSettings, corsConfig); - Consumer exceptionHandler = (e) -> onException(nioChannel, e); - SocketChannelContext context = new BytesChannelContext(nioChannel, selector, exceptionHandler, httpReadWritePipeline, + Consumer exceptionHandler = (e) -> onException(httpChannel, e); + SocketChannelContext context = new BytesChannelContext(httpChannel, selector, exceptionHandler, httpReadWritePipeline, new InboundChannelBuffer(pageSupplier)); - nioChannel.setContext(context); - return nioChannel; + httpChannel.setContext(context); + return httpChannel; } @Override - public NioServerSocketChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { - NioServerSocketChannel nioChannel = new NioServerSocketChannel(channel); - Consumer exceptionHandler = (e) -> logger.error(() -> - new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); + public NioHttpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { + NioHttpServerChannel httpServerChannel = new NioHttpServerChannel(channel); + Consumer exceptionHandler = (e) -> onServerException(httpServerChannel, e); Consumer acceptor = NioHttpServerTransport.this::acceptChannel; - ServerChannelContext context = new ServerChannelContext(nioChannel, this, selector, acceptor, exceptionHandler); - nioChannel.setContext(context); - return nioChannel; + ServerChannelContext context = new ServerChannelContext(httpServerChannel, this, selector, acceptor, exceptionHandler); + httpServerChannel.setContext(context); + return httpServerChannel; } } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java index 10bf4ed752321..3c6d4b12df943 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java @@ -20,19 +20,17 @@ package 
org.elasticsearch.transport.nio; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.nio.NioServerSocketChannel; -import org.elasticsearch.transport.TcpChannel; +import org.elasticsearch.transport.TcpServerChannel; import java.io.IOException; -import java.net.InetSocketAddress; import java.nio.channels.ServerSocketChannel; /** - * This is an implementation of {@link NioServerSocketChannel} that adheres to the {@link TcpChannel} + * This is an implementation of {@link NioServerSocketChannel} that adheres to the {@link TcpServerChannel} * interface. As it is a server socket, setting SO_LINGER and sending messages is not supported. */ -public class NioTcpServerChannel extends NioServerSocketChannel implements TcpChannel { +public class NioTcpServerChannel extends NioServerSocketChannel implements TcpServerChannel { private final String profile; @@ -41,21 +39,6 @@ public NioTcpServerChannel(String profile, ServerSocketChannel socketChannel) th this.profile = profile; } - @Override - public void sendMessage(BytesReference reference, ActionListener listener) { - throw new UnsupportedOperationException("Cannot send a message to a server channel."); - } - - @Override - public void setSoLinger(int value) throws IOException { - throw new UnsupportedOperationException("Cannot set SO_LINGER on a server channel."); - } - - @Override - public InetSocketAddress getRemoteAddress() { - return null; - } - @Override public void close() { getContext().closeChannel(); diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java index cf7d37493cb38..47229a0df2f6e 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransport.java @@ -19,7 +19,6 @@ package org.elasticsearch.transport.nio; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -176,8 +175,7 @@ public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) @Override public NioTcpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { NioTcpServerChannel nioChannel = new NioTcpServerChannel(profileName, channel); - Consumer exceptionHandler = (e) -> logger.error(() -> - new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); + Consumer exceptionHandler = (e) -> onServerException(nioChannel, e); Consumer acceptor = NioTransport.this::acceptChannel; ServerChannelContext context = new ServerChannelContext(nioChannel, this, selector, acceptor, exceptionHandler); nioChannel.setContext(context); diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 9d9008f7fb879..622020d6451db 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.network.CloseableChannel; 
+import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; @@ -53,6 +54,7 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_BIND_HOST; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH; @@ -74,9 +76,10 @@ public abstract class AbstractHttpServerTransport extends AbstractLifecycleCompo private final String[] bindHosts; private final String[] publishHosts; - protected final AtomicLong totalChannelsAccepted = new AtomicLong(); - protected final Set httpChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); - protected volatile BoundTransportAddress boundAddress; + private volatile BoundTransportAddress boundAddress; + private final AtomicLong totalChannelsAccepted = new AtomicLong(); + private final Set httpChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); + private final Set httpServerChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); protected AbstractHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, NamedXContentRegistry xContentRegistry, Dispatcher dispatcher) { @@ -116,7 +119,12 @@ public HttpInfo info() { return new HttpInfo(boundTransportAddress, maxContentLength.getBytes()); } - protected BoundTransportAddress createBoundHttpAddress() { + @Override + public HttpStats stats() { + return new HttpStats(httpChannels.size(), totalChannelsAccepted.get()); + } + + protected void bindServer() { // Bind and start to accept incoming connections. 
InetAddress hostAddresses[]; try { @@ -138,11 +146,71 @@ protected BoundTransportAddress createBoundHttpAddress() { } final int publishPort = resolvePublishPort(settings, boundAddresses, publishInetAddress); - final InetSocketAddress publishAddress = new InetSocketAddress(publishInetAddress, publishPort); - return new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[0]), new TransportAddress(publishAddress)); + TransportAddress publishAddress = new TransportAddress(new InetSocketAddress(publishInetAddress, publishPort)); + this.boundAddress = new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[0]), publishAddress); + logger.info("{}", boundAddress); + } + + private TransportAddress bindAddress(final InetAddress hostAddress) { + final AtomicReference lastException = new AtomicReference<>(); + final AtomicReference boundSocket = new AtomicReference<>(); + boolean success = port.iterate(portNumber -> { + try { + synchronized (httpServerChannels) { + HttpServerChannel httpServerChannel = bind(new InetSocketAddress(hostAddress, portNumber)); + httpServerChannels.add(httpServerChannel); + boundSocket.set(httpServerChannel.getLocalAddress()); + } + } catch (Exception e) { + lastException.set(e); + return false; + } + return true; + }); + if (!success) { + throw new BindHttpException("Failed to bind to [" + port.getPortRangeString() + "]", lastException.get()); + } + + if (logger.isDebugEnabled()) { + logger.debug("Bound http to address {{}}", NetworkAddress.format(boundSocket.get())); + } + return new TransportAddress(boundSocket.get()); + } + + protected abstract HttpServerChannel bind(InetSocketAddress hostAddress) throws Exception; + + @Override + protected void doStop() { + synchronized (httpServerChannels) { + if (httpServerChannels.isEmpty() == false) { + try { + CloseableChannel.closeChannels(new ArrayList<>(httpServerChannels), true); + } catch (Exception e) { + logger.warn("exception while closing channels", e); + } finally { + httpServerChannels.clear(); + } + } + } + + try { + CloseableChannel.closeChannels(new ArrayList<>(httpChannels), true); + } catch (Exception e) { + logger.warn("unexpected exception while closing http channels", e); + } + httpChannels.clear(); + + stopInternal(); } - protected abstract TransportAddress bindAddress(InetAddress hostAddress); + @Override + protected void doClose() { + } + + /** + * Called to tear down internal resources + */ + protected abstract void stopInternal(); // package private for tests static int resolvePublishPort(Settings settings, List boundAddresses, InetAddress publishInetAddress) { @@ -197,19 +265,23 @@ protected void onException(HttpChannel channel, Exception e) { CloseableChannel.closeChannel(channel); } else { logger.warn(() -> new ParameterizedMessage( - "caught exception while handling client http traffic, closing connection {}", channel), e); + "caught exception while handling client http traffic, closing connection {}", channel), e); CloseableChannel.closeChannel(channel); } } + protected void onServerException(HttpServerChannel channel, Exception e) { + logger.error(new ParameterizedMessage("exception from http server channel caught on transport layer [channel={}]", channel), e); + } + /** * Exception handler for exceptions that are not associated with a specific channel. 
* * @param exception the exception */ protected void onNonChannelException(Exception exception) { - logger.warn(new ParameterizedMessage("exception caught on transport layer [thread={}]", Thread.currentThread().getName()), - exception); + String threadName = Thread.currentThread().getName(); + logger.warn(new ParameterizedMessage("exception caught on transport layer [thread={}]", threadName), exception); } protected void serverAcceptedChannel(HttpChannel httpChannel) { diff --git a/server/src/main/java/org/elasticsearch/http/HttpServerChannel.java b/server/src/main/java/org/elasticsearch/http/HttpServerChannel.java new file mode 100644 index 0000000000000..e4222ae816806 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/http/HttpServerChannel.java @@ -0,0 +1,34 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http; + +import org.elasticsearch.common.network.CloseableChannel; + +import java.net.InetSocketAddress; + +public interface HttpServerChannel extends CloseableChannel { + + /** + * Returns the local address for this channel. + * + * @return the local address of this channel. + */ + InetSocketAddress getLocalAddress(); +} diff --git a/server/src/main/java/org/elasticsearch/transport/TcpServerChannel.java b/server/src/main/java/org/elasticsearch/transport/TcpServerChannel.java new file mode 100644 index 0000000000000..408ec1af20b96 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/transport/TcpServerChannel.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport; + +import org.elasticsearch.common.network.CloseableChannel; + +import java.net.InetSocketAddress; + + +/** + * This is a tcp channel representing a server channel listening for new connections. It is the server + * channel abstraction used by the {@link TcpTransport} and {@link TransportService}. All tcp transport + * implementations must return server channels that adhere to the required method contracts. 
+ */ +public interface TcpServerChannel extends CloseableChannel { + + /** + * This returns the profile for this channel. + */ + String getProfile(); + + /** + * Returns the local address for this channel. + * + * @return the local address of this channel. + */ + InetSocketAddress getLocalAddress(); + +} diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index bd862c19e9c6d..c8f256c2db89a 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -21,9 +21,6 @@ import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.IntSet; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.network.CloseableChannel; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; @@ -31,6 +28,7 @@ import org.elasticsearch.action.NotifyOnceListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -52,6 +50,7 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkUtils; @@ -68,6 +67,7 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.KeyedLock; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.rest.RestStatus; @@ -210,7 +210,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements private final ConcurrentMap profileBoundAddresses = newConcurrentMap(); // node id to actual channel private final ConcurrentMap connectedNodes = newConcurrentMap(); - private final Map> serverChannels = newConcurrentMap(); + private final Map> serverChannels = newConcurrentMap(); private final Set acceptedChannels = Collections.newSetFromMap(new ConcurrentHashMap<>()); private final KeyedLock connectionLock = new KeyedLock<>(); @@ -792,9 +792,9 @@ protected InetSocketAddress bindToPort(final String name, final InetAddress host final AtomicReference boundSocket = new AtomicReference<>(); boolean success = portsRange.iterate(portNumber -> { try { - TcpChannel channel = bind(name, new InetSocketAddress(hostAddress, portNumber)); + TcpServerChannel channel = bind(name, new InetSocketAddress(hostAddress, portNumber)); synchronized (serverChannels) { - List list = serverChannels.get(name); + List list = serverChannels.get(name); if (list == null) { list = new ArrayList<>(); serverChannels.put(name, list); @@ -957,9 +957,9 @@ protected final void doStop() { closeLock.writeLock().lock(); try { // first stop to accept any incoming connections so nobody can connect to this 
transport - for (Map.Entry> entry : serverChannels.entrySet()) { + for (Map.Entry> entry : serverChannels.entrySet()) { String profile = entry.getKey(); - List channels = entry.getValue(); + List channels = entry.getValue(); ActionListener closeFailLogger = ActionListener.wrap(c -> {}, e -> logger.warn(() -> new ParameterizedMessage("Error closing serverChannel for profile [{}]", profile), e)); channels.forEach(c -> c.addCloseListener(closeFailLogger)); @@ -999,7 +999,7 @@ protected final void doStop() { } } - protected void onException(TcpChannel channel, Exception e) { + public void onException(TcpChannel channel, Exception e) { if (!lifecycle.started()) { // just close and ignore - we are already stopped and just need to make sure we release all resources CloseableChannel.closeChannel(channel); @@ -1049,6 +1049,10 @@ protected void innerOnFailure(Exception e) { } } + protected void onServerException(TcpServerChannel channel, Exception e) { + logger.error(new ParameterizedMessage("exception from server channel caught on transport layer [channel={}]", channel), e); + } + /** * Exception handler for exceptions that are not associated with a specific channel. * @@ -1072,7 +1076,7 @@ protected void serverAcceptedChannel(TcpChannel channel) { * @param name the profile name * @param address the address to bind to */ - protected abstract TcpChannel bind(String name, InetSocketAddress address) throws IOException; + protected abstract TcpServerChannel bind(String name, InetSocketAddress address) throws IOException; /** * Initiate a single tcp socket channel. @@ -1087,8 +1091,7 @@ protected void serverAcceptedChannel(TcpChannel channel) { /** * Called to tear down internal resources */ - protected void stopInternal() { - } + protected abstract void stopInternal(); public boolean canCompress(TransportRequest request) { return compress && (!(request instanceof BytesTransportRequest)); diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index a7629e5f48b6c..ece9fd503c1ce 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -35,8 +35,7 @@ import org.junit.After; import org.junit.Before; -import java.io.IOException; -import java.net.InetAddress; +import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collections; @@ -128,8 +127,9 @@ public void dispatchBadRequest(final RestRequest request, try (AbstractHttpServerTransport transport = new AbstractHttpServerTransport(Settings.EMPTY, networkService, bigArrays, threadPool, xContentRegistry(), dispatcher) { + @Override - protected TransportAddress bindAddress(InetAddress hostAddress) { + protected HttpServerChannel bind(InetSocketAddress hostAddress) { return null; } @@ -139,12 +139,7 @@ protected void doStart() { } @Override - protected void doStop() { - - } - - @Override - protected void doClose() throws IOException { + protected void stopInternal() { } diff --git a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java index 2328aa4636361..d16300bf266d6 100644 --- a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java @@ -193,6 +193,10 @@ protected 
FakeChannel initiateChannel(InetSocketAddress address, ActionListener< return new FakeChannel(messageCaptor); } + @Override + protected void stopInternal() { + } + @Override public NodeChannels getConnection(DiscoveryNode node) { int numConnections = MockTcpTransport.LIGHT_PROFILE.getNumConnections(); @@ -237,7 +241,7 @@ public NodeChannels getConnection(DiscoveryNode node) { } } - private static final class FakeChannel implements TcpChannel { + private static final class FakeChannel implements TcpChannel, TcpServerChannel { private final AtomicReference messageCaptor; diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 8831c46c01136..bbff340c86011 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -225,7 +225,7 @@ private void configureSocket(Socket socket) throws SocketException { socket.setReuseAddress(TCP_REUSE_ADDRESS.get(settings)); } - public final class MockChannel implements Closeable, TcpChannel { + public final class MockChannel implements Closeable, TcpChannel, TcpServerChannel { private final AtomicBoolean isOpen = new AtomicBoolean(true); private final InetSocketAddress localAddress; private final ServerSocket serverSocket; diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java index cb9e243660a8e..2ab8719c33422 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java @@ -41,6 +41,7 @@ import org.elasticsearch.nio.ServerChannelContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpChannel; +import org.elasticsearch.transport.TcpServerChannel; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transports; @@ -191,7 +192,7 @@ public int consumeReads(InboundChannelBuffer channelBuffer) throws IOException { } } - private static class MockServerChannel extends NioServerSocketChannel implements TcpChannel { + private static class MockServerChannel extends NioServerSocketChannel implements TcpServerChannel { private final String profile; @@ -215,21 +216,6 @@ public String getProfile() { public void addCloseListener(ActionListener listener) { addCloseListener(ActionListener.toBiConsumer(listener)); } - - @Override - public void setSoLinger(int value) throws IOException { - throw new UnsupportedOperationException("Cannot set SO_LINGER on a server channel."); - } - - @Override - public InetSocketAddress getRemoteAddress() { - return null; - } - - @Override - public void sendMessage(BytesReference reference, ActionListener listener) { - throw new UnsupportedOperationException("Cannot send a message to a server channel."); - } } private static class MockSocketChannel extends NioSocketChannel implements TcpChannel { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java index ce06712722cd1..b761439b15b6a 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java @@ -109,7 +109,7 @@ protected ChannelHandler getClientChannelInitializer() { } @Override - protected void onException(TcpChannel channel, Exception e) { + public void onException(TcpChannel channel, Exception e) { if (!lifecycle.started()) { // just close and ignore - we are already stopped and just need to make sure we release all resources CloseableChannel.closeChannel(channel); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java index 161ac3678aeab..9427812ba1349 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java @@ -24,7 +24,7 @@ import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.transport.netty4.NettyTcpChannel; +import org.elasticsearch.transport.netty4.Netty4TcpChannel; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.user.KibanaUser; @@ -116,8 +116,8 @@ requests from all the nodes are attached with a user (either a serialize } if (extractClientCert && (unwrappedChannel instanceof TcpTransportChannel) && - ((TcpTransportChannel) unwrappedChannel).getChannel() instanceof NettyTcpChannel) { - Channel channel = ((NettyTcpChannel) ((TcpTransportChannel) unwrappedChannel).getChannel()).getLowLevelChannel(); + ((TcpTransportChannel) unwrappedChannel).getChannel() instanceof Netty4TcpChannel) { + Channel channel = ((Netty4TcpChannel) ((TcpTransportChannel) unwrappedChannel).getChannel()).getLowLevelChannel(); SslHandler sslHandler = channel.pipeline().get(SslHandler.class); if (channel.isOpen()) { assert sslHandler != null : "channel [" + channel + "] did not have a ssl handler. 
pipeline " + channel.pipeline(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java index 5315a944f778d..fd1b1198607d1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.transport.nio; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.recycler.Recycler; @@ -131,9 +130,8 @@ public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) @Override public NioTcpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { - NioTcpServerChannel nioChannel = new NioTcpServerChannel(profileName, channel); - Consumer exceptionHandler = (e) -> logger.error(() -> - new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); + NioTcpServerChannel nioChannel = new NioTcpServerChannel(profileName, channel);; + Consumer exceptionHandler = (e) -> onServerException(nioChannel, e); Consumer acceptor = SecurityNioTransport.this::acceptChannel; ServerChannelContext context = new ServerChannelContext(nioChannel, this, selector, acceptor, exceptionHandler); nioChannel.setContext(context); From 00283a61e1dbcd6990f0ed7d369261a31fc89d8c Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 20 Jun 2018 16:26:26 -0700 Subject: [PATCH 02/34] Remove unused generic type for client execute method (#31444) This commit removes the request builder generic type for AbstractClient as it was unused. 
--- .../index/reindex/AsyncBulkByScrollActionTests.java | 6 ++---- .../java/org/elasticsearch/client/FilterClient.java | 6 ++---- .../client/ParentTaskAssigningClient.java | 7 ++----- .../org/elasticsearch/client/node/NodeClient.java | 7 ++----- .../elasticsearch/client/support/AbstractClient.java | 7 ++++--- .../client/transport/TransportClient.java | 4 ++-- .../transport/RemoteClusterAwareClient.java | 4 +--- .../client/ParentTaskAssigningClientTests.java | 8 ++------ .../org/elasticsearch/test/client/NoOpClient.java | 7 ++----- .../org/elasticsearch/xpack/core/ClientHelper.java | 12 +++++------- .../TransportSamlInvalidateSessionActionTests.java | 5 +---- .../audit/index/IndexAuditTrailMutedTests.java | 6 ++---- .../authc/esnative/NativeUsersStoreTests.java | 11 ++--------- .../security/support/SecurityIndexManagerTests.java | 8 ++------ 14 files changed, 31 insertions(+), 67 deletions(-) diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 9d22b90ee7f5b..6a5610de37a01 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; @@ -743,9 +742,8 @@ private class MyMockClient extends FilterClient { @Override @SuppressWarnings("unchecked") - protected > void doExecute( - Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { if (false == expectedHeaders.equals(threadPool().getThreadContext().getHeaders())) { listener.onFailure( new RuntimeException("Expected " + expectedHeaders + " but got " + threadPool().getThreadContext().getHeaders())); diff --git a/server/src/main/java/org/elasticsearch/client/FilterClient.java b/server/src/main/java/org/elasticsearch/client/FilterClient.java index bfccabac58043..b4230710414be 100644 --- a/server/src/main/java/org/elasticsearch/client/FilterClient.java +++ b/server/src/main/java/org/elasticsearch/client/FilterClient.java @@ -21,13 +21,11 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; - /** * A {@link Client} that contains another {@link Client} which it * uses as its basic source, possibly transforming the requests / responses along the @@ -62,8 +60,8 @@ public void close() { } @Override - protected > void doExecute( - Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { in().execute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java 
b/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java index a0934ba633dd5..27de6619053b3 100644 --- a/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java +++ b/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.tasks.Task; @@ -58,10 +57,8 @@ public Client unwrap() { } @Override - protected < Request extends ActionRequest, - Response extends ActionResponse, - RequestBuilder extends ActionRequestBuilder - > void doExecute(Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { request.setParentTask(parentTask); super.doExecute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java index 9e50fa56fab60..0ad863c936741 100644 --- a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java +++ b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.Client; @@ -67,10 +66,8 @@ public void close() { } @Override - public < Request extends ActionRequest, - Response extends ActionResponse, - RequestBuilder extends ActionRequestBuilder - > void doExecute(Action action, Request request, ActionListener listener) { + public + void doExecute(Action action, Request request, ActionListener listener) { // Discard the task because the Client interface doesn't use it. 
executeLocally(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 12db219f8ec78..31e5e3190079d 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainAction; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainRequest; @@ -401,7 +400,8 @@ public final vo doExecute(action, request, listener); } - protected abstract > void doExecute(Action action, Request request, ActionListener listener); + protected abstract + void doExecute(Action action, Request request, ActionListener listener); @Override public ActionFuture index(final IndexRequest request) { @@ -1764,7 +1764,8 @@ public void getSettings(GetSettingsRequest request, ActionListener headers) { return new FilterClient(this) { @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { ThreadContext threadContext = threadPool().getThreadContext(); try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(headers)) { super.doExecute(action, request, listener); diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 53f6dea21c7d1..ba18105e3f1ca 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.cluster.ClusterModule; @@ -377,7 +376,8 @@ public void close() { } @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { proxy.execute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java index aca8cdccaddb8..d93bbb57201e2 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.support.AbstractClient; @@ -43,8 +42,7 @@ final class RemoteClusterAwareClient extends AbstractClient { } 
@Override - protected > + protected void doExecute(Action action, Request request, ActionListener listener) { remoteClusterService.ensureConnected(clusterAlias, ActionListener.wrap(res -> { Transport.Connection connection = remoteClusterService.getConnection(clusterAlias); diff --git a/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java b/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java index bff713a225482..eb1338ad78816 100644 --- a/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java +++ b/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.search.ClearScrollRequest; @@ -38,11 +37,8 @@ public void testSetsParentId() { // This mock will do nothing but verify that parentTaskId is set on all requests sent to it. NoOpClient mock = new NoOpClient(getTestName()) { @Override - protected < Request extends ActionRequest, - Response extends ActionResponse, - RequestBuilder extends ActionRequestBuilder - > void doExecute(Action action, Request request, - ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { assertEquals(parentTaskId[0], request.getParentTask()); super.doExecute(action, request, listener); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java index 8ef08a259821a..d95e1d32663e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.common.settings.Settings; @@ -51,10 +50,8 @@ public NoOpClient(String testName) { } @Override - protected > - void doExecute(Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { listener.onResponse(null); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index ff3091bde93b9..0657eb013972a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -138,10 +138,9 @@ public static T executeWithHeaders(Map> void executeWithHeadersAsync( - Map headers, String origin, Client client, Action action, Request request, - ActionListener listener) { + public static + void executeWithHeadersAsync(Map headers, String origin, Client client, Action action, Request request, + ActionListener listener) { Map filteredHeaders = headers.entrySet().stream().filter(e -> SECURITY_HEADER_FILTERS.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, 
Map.Entry::getValue)); @@ -176,9 +175,8 @@ private ClientWithOrigin(Client in, String origin) { } @Override - protected > void doExecute( - Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { final Supplier supplier = in().threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = in().threadPool().getThreadContext().stashContext()) { in().threadPool().getThreadContext().putTransient(ACTION_ORIGIN_TRANSIENT_NAME, origin); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 85d1d4a161d1d..b46d307866284 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; @@ -123,9 +122,7 @@ public void setup() throws Exception { searchRequests = new ArrayList<>(); final Client client = new NoOpClient(threadPool) { @Override - protected > + protected void doExecute(Action action, Request request, ActionListener listener) { if (IndexAction.NAME.equals(action.name())) { assertThat(request, instanceOf(IndexRequest.class)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java index c17134093c593..9bc5c989d1f9c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; @@ -70,9 +69,8 @@ class IClient extends FilterClient { } @Override - protected > void doExecute( - Action action, Request request, ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { clientCalled.set(true); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index 3d739d57f480c..9fbcaa493dd96 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ -8,7 +8,6 @@ import 
org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; @@ -72,14 +71,8 @@ public void setupMocks() { client = new FilterClient(mockClient) { @Override - protected < - Request extends ActionRequest, - Response extends ActionResponse, - RequestBuilder extends ActionRequestBuilder - > void doExecute( - Action action, - Request request, - ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { requests.add(new Tuple<>(request, listener)); } }; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 928c9bbd1b143..7d10198c6aea8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.client.Client; @@ -80,11 +79,8 @@ public void setUpManager() { actions = new LinkedHashMap<>(); final Client client = new FilterClient(mockClient) { @Override - protected > - void doExecute(Action action, Request request, - ActionListener listener) { + protected + void doExecute(Action action, Request request, ActionListener listener) { final Map> map = actions.getOrDefault(action, new HashMap<>()); map.put(request, listener); actions.put(action, map); From 86423f9563dd41cbf2a15b16962e2878ffe98185 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 20 Jun 2018 19:50:14 -0600 Subject: [PATCH 03/34] Ensure local addresses aren't null (#31440) Currently we set local addresses on the creation time of a NioChannel. However, this may return null as the local address may not have been set yet. An example is the local address has not been set on a client channel as the connection process is not yet complete. This PR modifies the getter to set the local field if it is currently null. 
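The getter change is easiest to see in isolation. The following is a condensed sketch of the pattern (LazyLocalAddressChannel is an illustrative name, not a class in the patch), mirroring the lookup that the patch moves out of the constructor and into getLocalAddress():

    import java.net.InetSocketAddress;
    import java.nio.channels.SocketChannel;

    class LazyLocalAddressChannel {
        private final SocketChannel socketChannel;
        private volatile InetSocketAddress localAddress; // null until the socket is bound/connected

        LazyLocalAddressChannel(SocketChannel socketChannel) {
            this.socketChannel = socketChannel;
            // No address lookup here: a client channel may not have finished connecting yet,
            // so the local address could still be null at construction time.
        }

        public InetSocketAddress getLocalAddress() {
            if (localAddress == null) {
                // Once the socket is bound this returns a non-null address; caching it means
                // later callers never observe null again.
                localAddress = (InetSocketAddress) socketChannel.socket().getLocalSocketAddress();
            }
            return localAddress;
        }
    }

The unsynchronized null check is a benign race: two threads may both perform the lookup, but they cache the same value, so no locking is needed.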
--- .../org/elasticsearch/nio/ChannelFactory.java | 6 +++++ .../org/elasticsearch/nio/NioChannel.java | 13 ++-------- .../nio/NioServerSocketChannel.java | 25 ++++++++++++++----- .../elasticsearch/nio/NioSocketChannel.java | 21 +++++++++++++--- .../elasticsearch/nio/EventHandlerTests.java | 5 +++- .../http/nio/NioHttpChannel.java | 3 +-- .../transport/nio/NioTcpChannel.java | 2 +- .../transport/nio/NioTcpServerChannel.java | 4 +-- .../transport/nio/MockNioTransport.java | 8 +++--- 9 files changed, 54 insertions(+), 33 deletions(-) diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/ChannelFactory.java b/libs/nio/src/main/java/org/elasticsearch/nio/ChannelFactory.java index 77443d948d9a6..f0dc3e567fef6 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/ChannelFactory.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/ChannelFactory.java @@ -21,6 +21,7 @@ import java.io.Closeable; import java.io.IOException; +import java.io.UncheckedIOException; import java.net.InetSocketAddress; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; @@ -99,6 +100,11 @@ private Socket internalCreateChannel(NioSelector selector, SocketChannel rawChan Socket channel = createChannel(selector, rawChannel); assert channel.getContext() != null : "channel context should have been set on channel"; return channel; + } catch (UncheckedIOException e) { + // This can happen if getRemoteAddress throws IOException. + IOException cause = e.getCause(); + closeRawChannel(rawChannel, cause); + throw cause; } catch (Exception e) { closeRawChannel(rawChannel, e); throw e; diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/NioChannel.java b/libs/nio/src/main/java/org/elasticsearch/nio/NioChannel.java index 2cc2bd260f0b2..55038fabcef8e 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/NioChannel.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/NioChannel.java @@ -19,7 +19,6 @@ package org.elasticsearch.nio; -import java.io.IOException; import java.net.InetSocketAddress; import java.nio.channels.NetworkChannel; import java.util.function.BiConsumer; @@ -32,20 +31,10 @@ */ public abstract class NioChannel { - private final InetSocketAddress localAddress; - - NioChannel(NetworkChannel socketChannel) throws IOException { - this.localAddress = (InetSocketAddress) socketChannel.getLocalAddress(); - } - public boolean isOpen() { return getContext().isOpen(); } - public InetSocketAddress getLocalAddress() { - return localAddress; - } - /** * Adds a close listener to the channel. Multiple close listeners can be added. There is no guarantee * about the order in which close listeners will be executed. 
If the channel is already closed, the @@ -64,6 +53,8 @@ public void close() { getContext().closeChannel(); } + public abstract InetSocketAddress getLocalAddress(); + public abstract NetworkChannel getRawChannel(); public abstract ChannelContext getContext(); diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/NioServerSocketChannel.java b/libs/nio/src/main/java/org/elasticsearch/nio/NioServerSocketChannel.java index 9f78c3b1b319d..a335e6925881a 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/NioServerSocketChannel.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/NioServerSocketChannel.java @@ -19,19 +19,20 @@ package org.elasticsearch.nio; -import java.io.IOException; +import java.net.InetSocketAddress; import java.nio.channels.ServerSocketChannel; import java.util.concurrent.atomic.AtomicBoolean; public class NioServerSocketChannel extends NioChannel { - private final ServerSocketChannel socketChannel; + private final ServerSocketChannel serverSocketChannel; private final AtomicBoolean contextSet = new AtomicBoolean(false); + private volatile InetSocketAddress localAddress; private ServerChannelContext context; - public NioServerSocketChannel(ServerSocketChannel socketChannel) throws IOException { - super(socketChannel); - this.socketChannel = socketChannel; + public NioServerSocketChannel(ServerSocketChannel serverSocketChannel) { + this.serverSocketChannel = serverSocketChannel; + attemptToSetLocalAddress(); } /** @@ -48,9 +49,15 @@ public void setContext(ServerChannelContext context) { } } + @Override + public InetSocketAddress getLocalAddress() { + attemptToSetLocalAddress(); + return localAddress; + } + @Override public ServerSocketChannel getRawChannel() { - return socketChannel; + return serverSocketChannel; } @Override @@ -64,4 +71,10 @@ public String toString() { "localAddress=" + getLocalAddress() + '}'; } + + private void attemptToSetLocalAddress() { + if (localAddress == null) { + localAddress = (InetSocketAddress) serverSocketChannel.socket().getLocalSocketAddress(); + } + } } diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/NioSocketChannel.java b/libs/nio/src/main/java/org/elasticsearch/nio/NioSocketChannel.java index 32e934766913e..c7d44990837cd 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/NioSocketChannel.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/NioSocketChannel.java @@ -20,6 +20,7 @@ package org.elasticsearch.nio; import java.io.IOException; +import java.io.UncheckedIOException; import java.net.InetSocketAddress; import java.nio.channels.SocketChannel; import java.util.concurrent.atomic.AtomicBoolean; @@ -27,15 +28,19 @@ public class NioSocketChannel extends NioChannel { - private final InetSocketAddress remoteAddress; private final AtomicBoolean contextSet = new AtomicBoolean(false); private final SocketChannel socketChannel; + private final InetSocketAddress remoteAddress; + private volatile InetSocketAddress localAddress; private SocketChannelContext context; - public NioSocketChannel(SocketChannel socketChannel) throws IOException { - super(socketChannel); + public NioSocketChannel(SocketChannel socketChannel) { this.socketChannel = socketChannel; - this.remoteAddress = (InetSocketAddress) socketChannel.getRemoteAddress(); + try { + this.remoteAddress = (InetSocketAddress) socketChannel.getRemoteAddress(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } public void setContext(SocketChannelContext context) { @@ -46,6 +51,14 @@ public void setContext(SocketChannelContext context) { } } + 
@Override + public InetSocketAddress getLocalAddress() { + if (localAddress == null) { + localAddress = (InetSocketAddress) socketChannel.socket().getLocalSocketAddress(); + } + return localAddress; + } + @Override public SocketChannel getRawChannel() { return socketChannel; diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java index a9e1836199e25..0cc3aa048008a 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java @@ -23,6 +23,7 @@ import org.junit.Before; import java.io.IOException; +import java.net.ServerSocket; import java.nio.channels.CancelledKeyException; import java.nio.channels.SelectionKey; import java.nio.channels.ServerSocketChannel; @@ -69,7 +70,9 @@ public void setUpHandler() throws IOException { channel.setContext(context); handler.handleRegistration(context); - NioServerSocketChannel serverChannel = new NioServerSocketChannel(mock(ServerSocketChannel.class)); + ServerSocketChannel serverSocketChannel = mock(ServerSocketChannel.class); + when(serverSocketChannel.socket()).thenReturn(mock(ServerSocket.class)); + NioServerSocketChannel serverChannel = new NioServerSocketChannel(serverSocketChannel); serverContext = new DoNotRegisterServerContext(serverChannel, mock(NioSelector.class), mock(Consumer.class)); serverChannel.setContext(serverContext); diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java index 255faab5ddad0..0a797a5687ec7 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java @@ -24,12 +24,11 @@ import org.elasticsearch.http.HttpResponse; import org.elasticsearch.nio.NioSocketChannel; -import java.io.IOException; import java.nio.channels.SocketChannel; public class NioHttpChannel extends NioSocketChannel implements HttpChannel { - NioHttpChannel(SocketChannel socketChannel) throws IOException { + NioHttpChannel(SocketChannel socketChannel) { super(socketChannel); } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java index d700ad567bc19..947a255b178c8 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpChannel.java @@ -32,7 +32,7 @@ public class NioTcpChannel extends NioSocketChannel implements TcpChannel { private final String profile; - public NioTcpChannel(String profile, SocketChannel socketChannel) throws IOException { + public NioTcpChannel(String profile, SocketChannel socketChannel) { super(socketChannel); this.profile = profile; } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java index 3c6d4b12df943..0d4b00f14b461 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTcpServerChannel.java @@ -23,7 +23,6 @@ import org.elasticsearch.nio.NioServerSocketChannel; import 
org.elasticsearch.transport.TcpServerChannel; -import java.io.IOException; import java.nio.channels.ServerSocketChannel; /** @@ -34,12 +33,11 @@ public class NioTcpServerChannel extends NioServerSocketChannel implements TcpSe private final String profile; - public NioTcpServerChannel(String profile, ServerSocketChannel socketChannel) throws IOException { + public NioTcpServerChannel(String profile, ServerSocketChannel socketChannel) { super(socketChannel); this.profile = profile; } - @Override public void close() { getContext().closeChannel(); } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java index 2ab8719c33422..3eca4818c4ab1 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/MockNioTransport.java @@ -165,7 +165,7 @@ public MockSocketChannel createChannel(NioSelector selector, SocketChannel chann @Override public MockServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) throws IOException { - MockServerChannel nioServerChannel = new MockServerChannel(profileName, channel, this, selector); + MockServerChannel nioServerChannel = new MockServerChannel(profileName, channel); Consumer exceptionHandler = (e) -> logger.error(() -> new ParameterizedMessage("exception from server channel caught on transport layer [{}]", channel), e); ServerChannelContext context = new ServerChannelContext(nioServerChannel, this, selector, MockNioTransport.this::acceptChannel, @@ -196,8 +196,7 @@ private static class MockServerChannel extends NioServerSocketChannel implements private final String profile; - MockServerChannel(String profile, ServerSocketChannel channel, ChannelFactory channelFactory, NioSelector selector) - throws IOException { + MockServerChannel(String profile, ServerSocketChannel channel) { super(channel); this.profile = profile; } @@ -222,8 +221,7 @@ private static class MockSocketChannel extends NioSocketChannel implements TcpCh private final String profile; - private MockSocketChannel(String profile, java.nio.channels.SocketChannel socketChannel, NioSelector selector) - throws IOException { + private MockSocketChannel(String profile, java.nio.channels.SocketChannel socketChannel, NioSelector selector) { super(socketChannel); this.profile = profile; } From da69ab28c70bb2817da1efcdd45a14781f741157 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 21 Jun 2018 16:00:26 +0200 Subject: [PATCH 04/34] Return transport addresses from UnicastHostsProvider (#31426) With #20695 we removed local transport and there is just TransportAddress now. The UnicastHostsProvider currently returns DiscoveryNode instances, where, during pinging, we're actually only making use of the TransportAddress to establish a first connection to the possible new node. To simplify the interface, we can just return a list of transport addresses instead, which means that it's not necessary anymore to create fake node objects in each plugin just to return the address information. 
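In sketch form, the provider interface moves from returning discovery nodes to returning plain addresses. The types below are simplified stand-ins (the real interface is org.elasticsearch.discovery.zen.UnicastHostsProvider and the real address type is TransportAddress):

    import java.util.Arrays;
    import java.util.List;

    // Stand-in for a resolved transport address.
    final class HostAddress {
        final String host;
        final int port;
        HostAddress(String host, int port) {
            this.host = host;
            this.port = port;
        }
    }

    interface UnicastHostsProviderSketch {
        // Previously the provider returned a list of DiscoveryNode, which forced every discovery
        // plugin to wrap each address in a throwaway node (ids like "#cloud-..." or
        // "#zen_file_unicast_host_..."). Pinging only needs the addresses, so now:
        List<HostAddress> buildDynamicHosts();
    }

    // Example provider in the spirit of the file/EC2/GCE providers: resolve and return
    // addresses, with no fake node ids or version fields to fill in.
    class StaticUnicastHostsProvider implements UnicastHostsProviderSketch {
        @Override
        public List<HostAddress> buildDynamicHosts() {
            return Arrays.asList(new HostAddress("192.168.0.1", 9300), new HostAddress("192.168.0.2", 9305));
        }
    }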
--- .../classic/AzureUnicastHostsProvider.java | 26 +++---- .../ec2/AwsEc2UnicastHostsProvider.java | 33 ++++---- .../discovery/ec2/Ec2DiscoveryTests.java | 75 +++++++++---------- .../file/FileBasedUnicastHostsProvider.java | 14 ++-- .../FileBasedUnicastHostsProviderTests.java | 43 +++++------ .../gce/GceUnicastHostsProvider.java | 25 +++---- .../discovery/gce/GceDiscoveryTests.java | 53 +++++++------ .../discovery/zen/UnicastHostsProvider.java | 4 +- .../discovery/zen/UnicastZenPing.java | 75 ++++++++----------- .../single/SingleNodeDiscoveryIT.java | 2 +- .../discovery/zen/UnicastZenPingTests.java | 41 +++++----- .../discovery/MockUncasedHostProvider.java | 4 +- 12 files changed, 175 insertions(+), 220 deletions(-) diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java index 2bc6cc4b130cd..482dafb008fc5 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java @@ -24,12 +24,10 @@ import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; import com.microsoft.windowsazure.management.compute.models.InstanceEndpoint; import com.microsoft.windowsazure.management.compute.models.RoleInstance; -import org.elasticsearch.Version; import org.elasticsearch.cloud.azure.classic.AzureServiceDisableException; import org.elasticsearch.cloud.azure.classic.AzureServiceRemoteException; import org.elasticsearch.cloud.azure.classic.management.AzureComputeService; import org.elasticsearch.cloud.azure.classic.management.AzureComputeService.Discovery; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.network.InetAddresses; @@ -47,9 +45,6 @@ import java.util.ArrayList; import java.util.List; -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; - public class AzureUnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider { public enum HostType { @@ -104,7 +99,7 @@ public static Deployment fromString(String string) { private final TimeValue refreshInterval; private long lastRefresh; - private List cachedDiscoNodes; + private List dynamicHosts; private final HostType hostType; private final String publicEndpointName; private final String deploymentName; @@ -137,30 +132,30 @@ public AzureUnicastHostsProvider(Settings settings, AzureComputeService azureCom * Setting `cloud.azure.refresh_interval` to `0` will disable caching (default). 
*/ @Override - public List buildDynamicNodes() { + public List buildDynamicHosts() { if (refreshInterval.millis() != 0) { - if (cachedDiscoNodes != null && + if (dynamicHosts != null && (refreshInterval.millis() < 0 || (System.currentTimeMillis() - lastRefresh) < refreshInterval.millis())) { logger.trace("using cache to retrieve node list"); - return cachedDiscoNodes; + return dynamicHosts; } lastRefresh = System.currentTimeMillis(); } logger.debug("start building nodes list using Azure API"); - cachedDiscoNodes = new ArrayList<>(); + dynamicHosts = new ArrayList<>(); HostedServiceGetDetailedResponse detailed; try { detailed = azureComputeService.getServiceDetails(); } catch (AzureServiceDisableException e) { logger.debug("Azure discovery service has been disabled. Returning empty list of nodes."); - return cachedDiscoNodes; + return dynamicHosts; } catch (AzureServiceRemoteException e) { // We got a remote exception logger.warn("can not get list of azure nodes: [{}]. Returning empty list of nodes.", e.getMessage()); logger.trace("AzureServiceRemoteException caught", e); - return cachedDiscoNodes; + return dynamicHosts; } InetAddress ipAddress = null; @@ -212,8 +207,7 @@ public List buildDynamicNodes() { TransportAddress[] addresses = transportService.addressesFromString(networkAddress, 1); for (TransportAddress address : addresses) { logger.trace("adding {}, transport_address {}", networkAddress, address); - cachedDiscoNodes.add(new DiscoveryNode("#cloud-" + instance.getInstanceName(), address, emptyMap(), - emptySet(), Version.CURRENT.minimumCompatibilityVersion())); + dynamicHosts.add(address); } } catch (Exception e) { logger.warn("can not convert [{}] to transport address. skipping. [{}]", networkAddress, e.getMessage()); @@ -221,9 +215,9 @@ public List buildDynamicNodes() { } } - logger.debug("{} node(s) added", cachedDiscoNodes.size()); + logger.debug("{} addresses added", dynamicHosts.size()); - return cachedDiscoNodes; + return dynamicHosts; } protected String resolveInstanceAddress(final HostType hostType, final RoleInstance instance) { diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index 2c536981b04c5..396e9f707d404 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -29,8 +29,6 @@ import com.amazonaws.services.ec2.model.Tag; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -46,8 +44,6 @@ import java.util.Set; import static java.util.Collections.disjoint; -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; import static org.elasticsearch.discovery.ec2.AwsEc2Service.HostType.TAG_PREFIX; import static org.elasticsearch.discovery.ec2.AwsEc2Service.HostType.PRIVATE_DNS; import static org.elasticsearch.discovery.ec2.AwsEc2Service.HostType.PRIVATE_IP; @@ -70,7 +66,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos private final String hostType; - private final 
DiscoNodesCache discoNodes; + private final TransportAddressesCache dynamicHosts; AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AwsEc2Service awsEc2Service) { super(settings); @@ -78,7 +74,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos this.awsEc2Service = awsEc2Service; this.hostType = AwsEc2Service.HOST_TYPE_SETTING.get(settings); - this.discoNodes = new DiscoNodesCache(AwsEc2Service.NODE_CACHE_TIME_SETTING.get(settings)); + this.dynamicHosts = new TransportAddressesCache(AwsEc2Service.NODE_CACHE_TIME_SETTING.get(settings)); this.bindAnyGroup = AwsEc2Service.ANY_GROUP_SETTING.get(settings); this.groups = new HashSet<>(); @@ -96,13 +92,13 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos } @Override - public List buildDynamicNodes() { - return discoNodes.getOrRefresh(); + public List buildDynamicHosts() { + return dynamicHosts.getOrRefresh(); } - protected List fetchDynamicNodes() { + protected List fetchDynamicNodes() { - final List discoNodes = new ArrayList<>(); + final List dynamicHosts = new ArrayList<>(); final DescribeInstancesResult descInstances; try (AmazonEc2Reference clientReference = awsEc2Service.client()) { @@ -115,7 +111,7 @@ protected List fetchDynamicNodes() { } catch (final AmazonClientException e) { logger.info("Exception while retrieving instance list from AWS API: {}", e.getMessage()); logger.debug("Full exception:", e); - return discoNodes; + return dynamicHosts; } logger.trace("building dynamic unicast discovery nodes..."); @@ -179,8 +175,7 @@ && disjoint(securityGroupIds, groups)) { final TransportAddress[] addresses = transportService.addressesFromString(address, 1); for (int i = 0; i < addresses.length; i++) { logger.trace("adding {}, address {}, transport_address {}", instance.getInstanceId(), address, addresses[i]); - discoNodes.add(new DiscoveryNode(instance.getInstanceId(), "#cloud-" + instance.getInstanceId() + "-" + i, - addresses[i], emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion())); + dynamicHosts.add(addresses[i]); } } catch (final Exception e) { final String finalAddress = address; @@ -194,9 +189,9 @@ && disjoint(securityGroupIds, groups)) { } } - logger.debug("using dynamic discovery nodes {}", discoNodes); + logger.debug("using dynamic transport addresses {}", dynamicHosts); - return discoNodes; + return dynamicHosts; } private DescribeInstancesRequest buildDescribeInstancesRequest() { @@ -222,11 +217,11 @@ private DescribeInstancesRequest buildDescribeInstancesRequest() { return describeInstancesRequest; } - private final class DiscoNodesCache extends SingleObjectCache> { + private final class TransportAddressesCache extends SingleObjectCache> { private boolean empty = true; - protected DiscoNodesCache(TimeValue refreshInterval) { + protected TransportAddressesCache(TimeValue refreshInterval) { super(refreshInterval, new ArrayList<>()); } @@ -236,8 +231,8 @@ protected boolean needsRefresh() { } @Override - protected List refresh() { - final List nodes = fetchDynamicNodes(); + protected List refresh() { + final List nodes = fetchDynamicNodes(); empty = nodes.isEmpty(); return nodes; } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index 43cc924fadb10..9dc2e02edc1b5 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ 
b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -21,7 +21,6 @@ import com.amazonaws.services.ec2.model.Tag; import org.elasticsearch.Version; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -87,16 +86,16 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi null); } - protected List buildDynamicNodes(Settings nodeSettings, int nodes) { - return buildDynamicNodes(nodeSettings, nodes, null); + protected List buildDynamicHosts(Settings nodeSettings, int nodes) { + return buildDynamicHosts(nodeSettings, nodes, null); } - protected List buildDynamicNodes(Settings nodeSettings, int nodes, List> tagsList) { + protected List buildDynamicHosts(Settings nodeSettings, int nodes, List> tagsList) { try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY, nodes, tagsList)) { AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, plugin.ec2Service); - List discoveryNodes = provider.buildDynamicNodes(); - logger.debug("--> nodes found: {}", discoveryNodes); - return discoveryNodes; + List dynamicHosts = provider.buildDynamicHosts(); + logger.debug("--> addresses found: {}", dynamicHosts); + return dynamicHosts; } catch (IOException e) { fail("Unexpected IOException"); return null; @@ -107,7 +106,7 @@ public void testDefaultSettings() throws InterruptedException { int nodes = randomInt(10); Settings nodeSettings = Settings.builder() .build(); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes); + List discoveryNodes = buildDynamicHosts(nodeSettings, nodes); assertThat(discoveryNodes, hasSize(nodes)); } @@ -119,12 +118,11 @@ public void testPrivateIp() throws InterruptedException { Settings nodeSettings = Settings.builder() .put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "private_ip") .build(); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes); - assertThat(discoveryNodes, hasSize(nodes)); + List transportAddresses = buildDynamicHosts(nodeSettings, nodes); + assertThat(transportAddresses, hasSize(nodes)); // We check that we are using here expected address int node = 1; - for (DiscoveryNode discoveryNode : discoveryNodes) { - TransportAddress address = discoveryNode.getAddress(); + for (TransportAddress address : transportAddresses) { TransportAddress expected = poorMansDNS.get(AmazonEC2Mock.PREFIX_PRIVATE_IP + node++); assertEquals(address, expected); } @@ -138,12 +136,11 @@ public void testPublicIp() throws InterruptedException { Settings nodeSettings = Settings.builder() .put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "public_ip") .build(); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes); - assertThat(discoveryNodes, hasSize(nodes)); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes); + assertThat(dynamicHosts, hasSize(nodes)); // We check that we are using here expected address int node = 1; - for (DiscoveryNode discoveryNode : discoveryNodes) { - TransportAddress address = discoveryNode.getAddress(); + for (TransportAddress address : dynamicHosts) { TransportAddress expected = poorMansDNS.get(AmazonEC2Mock.PREFIX_PUBLIC_IP + node++); assertEquals(address, expected); } @@ -159,13 +156,12 @@ public void testPrivateDns() throws InterruptedException { Settings nodeSettings = Settings.builder() 
.put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "private_dns") .build(); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes); - assertThat(discoveryNodes, hasSize(nodes)); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes); + assertThat(dynamicHosts, hasSize(nodes)); // We check that we are using here expected address int node = 1; - for (DiscoveryNode discoveryNode : discoveryNodes) { + for (TransportAddress address : dynamicHosts) { String instanceId = "node" + node++; - TransportAddress address = discoveryNode.getAddress(); TransportAddress expected = poorMansDNS.get( AmazonEC2Mock.PREFIX_PRIVATE_DNS + instanceId + AmazonEC2Mock.SUFFIX_PRIVATE_DNS); assertEquals(address, expected); @@ -182,13 +178,12 @@ public void testPublicDns() throws InterruptedException { Settings nodeSettings = Settings.builder() .put(AwsEc2Service.HOST_TYPE_SETTING.getKey(), "public_dns") .build(); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes); - assertThat(discoveryNodes, hasSize(nodes)); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes); + assertThat(dynamicHosts, hasSize(nodes)); // We check that we are using here expected address int node = 1; - for (DiscoveryNode discoveryNode : discoveryNodes) { + for (TransportAddress address : dynamicHosts) { String instanceId = "node" + node++; - TransportAddress address = discoveryNode.getAddress(); TransportAddress expected = poorMansDNS.get( AmazonEC2Mock.PREFIX_PUBLIC_DNS + instanceId + AmazonEC2Mock.SUFFIX_PUBLIC_DNS); assertEquals(address, expected); @@ -201,7 +196,7 @@ public void testInvalidHostType() throws InterruptedException { .build(); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { - buildDynamicNodes(nodeSettings, 1); + buildDynamicHosts(nodeSettings, 1); }); assertThat(exception.getMessage(), containsString("does_not_exist is unknown for discovery.ec2.host_type")); } @@ -227,8 +222,8 @@ public void testFilterByTags() throws InterruptedException { } logger.info("started [{}] instances with [{}] stage=prod tag", nodes, prodInstances); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes, tagsList); - assertThat(discoveryNodes, hasSize(prodInstances)); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes, tagsList); + assertThat(dynamicHosts, hasSize(prodInstances)); } public void testFilterByMultipleTags() throws InterruptedException { @@ -258,8 +253,8 @@ public void testFilterByMultipleTags() throws InterruptedException { } logger.info("started [{}] instances with [{}] stage=prod tag", nodes, prodInstances); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes, tagsList); - assertThat(discoveryNodes, hasSize(prodInstances)); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes, tagsList); + assertThat(dynamicHosts, hasSize(prodInstances)); } public void testReadHostFromTag() throws InterruptedException, UnknownHostException { @@ -285,11 +280,11 @@ public void testReadHostFromTag() throws InterruptedException, UnknownHostExcept } logger.info("started [{}] instances", nodes); - List discoveryNodes = buildDynamicNodes(nodeSettings, nodes, tagsList); - assertThat(discoveryNodes, hasSize(nodes)); - for (DiscoveryNode discoveryNode : discoveryNodes) { - TransportAddress address = discoveryNode.getAddress(); - TransportAddress expected = poorMansDNS.get(discoveryNode.getName()); + List dynamicHosts = buildDynamicHosts(nodeSettings, nodes, tagsList); + assertThat(dynamicHosts, hasSize(nodes)); + int node = 1; + for (TransportAddress address : 
dynamicHosts) { + TransportAddress expected = poorMansDNS.get("node" + node++); assertEquals(address, expected); } } @@ -306,13 +301,13 @@ public void testGetNodeListEmptyCache() throws Exception { AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(Settings.EMPTY, 1, null); DummyEc2HostProvider provider = new DummyEc2HostProvider(Settings.EMPTY, transportService, awsEc2Service) { @Override - protected List fetchDynamicNodes() { + protected List fetchDynamicNodes() { fetchCount++; return new ArrayList<>(); } }; for (int i=0; i<3; i++) { - provider.buildDynamicNodes(); + provider.buildDynamicHosts(); } assertThat(provider.fetchCount, is(3)); } @@ -323,18 +318,18 @@ public void testGetNodeListCached() throws Exception { try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) { DummyEc2HostProvider provider = new DummyEc2HostProvider(builder.build(), transportService, plugin.ec2Service) { @Override - protected List fetchDynamicNodes() { + protected List fetchDynamicNodes() { fetchCount++; - return Ec2DiscoveryTests.this.buildDynamicNodes(Settings.EMPTY, 1); + return Ec2DiscoveryTests.this.buildDynamicHosts(Settings.EMPTY, 1); } }; for (int i=0; i<3; i++) { - provider.buildDynamicNodes(); + provider.buildDynamicHosts(); } assertThat(provider.fetchCount, is(1)); Thread.sleep(1_000L); // wait for cache to expire for (int i=0; i<3; i++) { - provider.buildDynamicNodes(); + provider.buildDynamicHosts(); } assertThat(provider.fetchCount, is(2)); } diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java index 1029f907a660a..7abcb4454720c 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java @@ -21,8 +21,8 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.env.Environment; @@ -58,7 +58,6 @@ class FileBasedUnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider { static final String UNICAST_HOSTS_FILE = "unicast_hosts.txt"; - static final String UNICAST_HOST_PREFIX = "#zen_file_unicast_host_"; private final TransportService transportService; private final ExecutorService executorService; @@ -76,7 +75,7 @@ class FileBasedUnicastHostsProvider extends AbstractComponent implements Unicast } @Override - public List buildDynamicNodes() { + public List buildDynamicHosts() { List hostsList; try (Stream lines = Files.lines(unicastHostsFilePath)) { hostsList = lines.filter(line -> line.startsWith("#") == false) // lines starting with `#` are comments @@ -91,23 +90,22 @@ public List buildDynamicNodes() { hostsList = Collections.emptyList(); } - final List discoNodes = new ArrayList<>(); + final List dynamicHosts = new ArrayList<>(); try { - discoNodes.addAll(resolveHostsLists( + dynamicHosts.addAll(resolveHostsLists( executorService, logger, hostsList, 1, transportService, - UNICAST_HOST_PREFIX, resolveTimeout)); } catch (InterruptedException e) { throw new 
RuntimeException(e); } - logger.debug("[discovery-file] Using dynamic discovery nodes {}", discoNodes); + logger.debug("[discovery-file] Using dynamic discovery nodes {}", dynamicHosts); - return discoNodes; + return dynamicHosts; } } diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java index 3ddd15a7b4cf3..860d3537635d5 100644 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java +++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.discovery.file; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -50,7 +49,6 @@ import java.util.concurrent.Executors; import static org.elasticsearch.discovery.file.FileBasedUnicastHostsProvider.UNICAST_HOSTS_FILE; -import static org.elasticsearch.discovery.file.FileBasedUnicastHostsProvider.UNICAST_HOST_PREFIX; /** * Tests for {@link FileBasedUnicastHostsProvider}. @@ -104,23 +102,20 @@ public BoundTransportAddress boundAddress() { public void testBuildDynamicNodes() throws Exception { final List hostEntries = Arrays.asList("#comment, should be ignored", "192.168.0.1", "192.168.0.2:9305", "255.255.23.15"); - final List nodes = setupAndRunHostProvider(hostEntries); + final List nodes = setupAndRunHostProvider(hostEntries); assertEquals(hostEntries.size() - 1, nodes.size()); // minus 1 because we are ignoring the first line that's a comment - assertEquals("192.168.0.1", nodes.get(0).getAddress().getAddress()); - assertEquals(9300, nodes.get(0).getAddress().getPort()); - assertEquals(UNICAST_HOST_PREFIX + "192.168.0.1_0#", nodes.get(0).getId()); - assertEquals("192.168.0.2", nodes.get(1).getAddress().getAddress()); - assertEquals(9305, nodes.get(1).getAddress().getPort()); - assertEquals(UNICAST_HOST_PREFIX + "192.168.0.2:9305_0#", nodes.get(1).getId()); - assertEquals("255.255.23.15", nodes.get(2).getAddress().getAddress()); - assertEquals(9300, nodes.get(2).getAddress().getPort()); - assertEquals(UNICAST_HOST_PREFIX + "255.255.23.15_0#", nodes.get(2).getId()); + assertEquals("192.168.0.1", nodes.get(0).getAddress()); + assertEquals(9300, nodes.get(0).getPort()); + assertEquals("192.168.0.2", nodes.get(1).getAddress()); + assertEquals(9305, nodes.get(1).getPort()); + assertEquals("255.255.23.15", nodes.get(2).getAddress()); + assertEquals(9300, nodes.get(2).getPort()); } public void testEmptyUnicastHostsFile() throws Exception { final List hostEntries = Collections.emptyList(); - final List nodes = setupAndRunHostProvider(hostEntries); - assertEquals(0, nodes.size()); + final List addresses = setupAndRunHostProvider(hostEntries); + assertEquals(0, addresses.size()); } public void testUnicastHostsDoesNotExist() throws Exception { @@ -129,27 +124,27 @@ public void testUnicastHostsDoesNotExist() throws Exception { .build(); final Environment environment = TestEnvironment.newEnvironment(settings); final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(environment, transportService, executorService); - final List nodes = provider.buildDynamicNodes(); - assertEquals(0, nodes.size()); + final List addresses = 
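The file-based provider above keeps its parsing deliberately simple: every non-comment line of unicast_hosts.txt becomes one host entry, and resolution to transport addresses is delegated to resolveHostsLists. A minimal, self-contained sketch of that first step, using only JDK types (HostsFileReader and readHosts are illustrative names, not part of the patch):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Illustrative only: read a unicast_hosts.txt-style file into raw host strings.
final class HostsFileReader {

    private HostsFileReader() {}

    static List<String> readHosts(Path hostsFile) throws IOException {
        try (Stream<String> lines = Files.lines(hostsFile)) {
            return lines
                .filter(line -> line.startsWith("#") == false) // lines starting with '#' are comments
                .collect(Collectors.toList());
        }
    }
}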
provider.buildDynamicHosts(); + assertEquals(0, addresses.size()); } public void testInvalidHostEntries() throws Exception { List hostEntries = Arrays.asList("192.168.0.1:9300:9300"); - List nodes = setupAndRunHostProvider(hostEntries); - assertEquals(0, nodes.size()); + List addresses = setupAndRunHostProvider(hostEntries); + assertEquals(0, addresses.size()); } public void testSomeInvalidHostEntries() throws Exception { List hostEntries = Arrays.asList("192.168.0.1:9300:9300", "192.168.0.1:9301"); - List nodes = setupAndRunHostProvider(hostEntries); - assertEquals(1, nodes.size()); // only one of the two is valid and will be used - assertEquals("192.168.0.1", nodes.get(0).getAddress().getAddress()); - assertEquals(9301, nodes.get(0).getAddress().getPort()); + List addresses = setupAndRunHostProvider(hostEntries); + assertEquals(1, addresses.size()); // only one of the two is valid and will be used + assertEquals("192.168.0.1", addresses.get(0).getAddress()); + assertEquals(9301, addresses.get(0).getPort()); } // sets up the config dir, writes to the unicast hosts file in the config dir, // and then runs the file-based unicast host provider to get the list of discovery nodes - private List setupAndRunHostProvider(final List hostEntries) throws IOException { + private List setupAndRunHostProvider(final List hostEntries) throws IOException { final Path homeDir = createTempDir(); final Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), homeDir) @@ -168,6 +163,6 @@ private List setupAndRunHostProvider(final List hostEntri } return new FileBasedUnicastHostsProvider( - new Environment(settings, configPath), transportService, executorService).buildDynamicNodes(); + new Environment(settings, configPath), transportService, executorService).buildDynamicHosts(); } } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index de290245895d2..790d70a8b99b0 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -31,9 +31,7 @@ import com.google.api.services.compute.model.NetworkInterface; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.Version; import org.elasticsearch.cloud.gce.GceInstancesService; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.network.NetworkAddress; @@ -47,8 +45,6 @@ import org.elasticsearch.transport.TransportService; import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; public class GceUnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider { @@ -72,7 +68,7 @@ static final class Status { private final TimeValue refreshInterval; private long lastRefresh; - private List cachedDiscoNodes; + private List cachedDynamicHosts; public GceUnicastHostsProvider(Settings settings, GceInstancesService gceInstancesService, TransportService transportService, @@ -97,7 +93,7 @@ public GceUnicastHostsProvider(Settings settings, GceInstancesService gceInstanc * Information can be cached using `cloud.gce.refresh_interval` 
property if needed. */ @Override - public List buildDynamicNodes() { + public List buildDynamicHosts() { // We check that needed properties have been set if (this.project == null || this.project.isEmpty() || this.zones == null || this.zones.isEmpty()) { throw new IllegalArgumentException("one or more gce discovery settings are missing. " + @@ -106,16 +102,16 @@ public List buildDynamicNodes() { } if (refreshInterval.millis() != 0) { - if (cachedDiscoNodes != null && + if (cachedDynamicHosts != null && (refreshInterval.millis() < 0 || (System.currentTimeMillis() - lastRefresh) < refreshInterval.millis())) { if (logger.isTraceEnabled()) logger.trace("using cache to retrieve node list"); - return cachedDiscoNodes; + return cachedDynamicHosts; } lastRefresh = System.currentTimeMillis(); } logger.debug("start building nodes list using GCE API"); - cachedDiscoNodes = new ArrayList<>(); + cachedDynamicHosts = new ArrayList<>(); String ipAddress = null; try { InetAddress inetAddress = networkService.resolvePublishHostAddresses( @@ -133,7 +129,7 @@ public List buildDynamicNodes() { if (instances == null) { logger.trace("no instance found for project [{}], zones [{}].", this.project, this.zones); - return cachedDiscoNodes; + return cachedDynamicHosts; } for (Instance instance : instances) { @@ -238,8 +234,7 @@ public List buildDynamicNodes() { for (TransportAddress transportAddress : addresses) { logger.trace("adding {}, type {}, address {}, transport_address {}, status {}", name, type, ip_private, transportAddress, status); - cachedDiscoNodes.add(new DiscoveryNode("#cloud-" + name + "-" + 0, transportAddress, - emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion())); + cachedDynamicHosts.add(transportAddress); } } } catch (Exception e) { @@ -252,9 +247,9 @@ public List buildDynamicNodes() { logger.warn("exception caught during discovery", e); } - logger.debug("{} node(s) added", cachedDiscoNodes.size()); - logger.debug("using dynamic discovery nodes {}", cachedDiscoNodes); + logger.debug("{} addresses added", cachedDynamicHosts.size()); + logger.debug("using transport addresses {}", cachedDynamicHosts); - return cachedDiscoNodes; + return cachedDynamicHosts; } } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index 31ea9bdb1c21e..a1944a15d8036 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -21,9 +21,9 @@ import org.elasticsearch.Version; import org.elasticsearch.cloud.gce.GceInstancesServiceImpl; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; @@ -40,7 +40,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; /** * This test class uses a GCE HTTP Mock system which allows to simulate JSON Responses. 
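The GCE provider keeps the same refresh-interval caching it had before; only the cached element type changes from discovery nodes to transport addresses. A minimal sketch of that caching shape, kept self-contained with plain JDK types (CachedHostProvider, fetchFromCloudApi and the InetSocketAddress element are illustrative stand-ins, not the Elasticsearch API):

import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Illustrative only: re-fetch addresses at most once per refresh interval.
class CachedHostProvider {

    private final long refreshIntervalMillis; // 0 disables caching, negative caches forever
    private long lastRefresh;
    private List<InetSocketAddress> cached;

    CachedHostProvider(long refreshIntervalMillis) {
        this.refreshIntervalMillis = refreshIntervalMillis;
    }

    synchronized List<InetSocketAddress> buildDynamicHosts() {
        long now = System.currentTimeMillis();
        if (cached != null && (refreshIntervalMillis < 0 || now - lastRefresh < refreshIntervalMillis)) {
            return cached; // still fresh
        }
        lastRefresh = now;
        cached = Collections.unmodifiableList(fetchFromCloudApi());
        return cached;
    }

    // Placeholder for the real cloud API call.
    private List<InetSocketAddress> fetchFromCloudApi() {
        return new ArrayList<>();
    }
}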
@@ -105,13 +104,13 @@ public void stopGceComputeService() throws IOException { } } - protected List buildDynamicNodes(GceInstancesServiceImpl gceInstancesService, Settings nodeSettings) { + protected List buildDynamicNodes(GceInstancesServiceImpl gceInstancesService, Settings nodeSettings) { GceUnicastHostsProvider provider = new GceUnicastHostsProvider(nodeSettings, gceInstancesService, transportService, new NetworkService(Collections.emptyList())); - List discoveryNodes = provider.buildDynamicNodes(); - logger.info("--> nodes found: {}", discoveryNodes); - return discoveryNodes; + List dynamicHosts = provider.buildDynamicHosts(); + logger.info("--> addresses found: {}", dynamicHosts); + return dynamicHosts; } public void testNodesWithDifferentTagsAndNoTagSet() { @@ -120,8 +119,8 @@ public void testNodesWithDifferentTagsAndNoTagSet() { .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } public void testNodesWithDifferentTagsAndOneTagSet() { @@ -131,9 +130,8 @@ public void testNodesWithDifferentTagsAndOneTagSet() { .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(1)); - assertThat(discoveryNodes.get(0).getId(), is("#cloud-test2-0")); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(1)); } public void testNodesWithDifferentTagsAndTwoTagSet() { @@ -143,9 +141,8 @@ public void testNodesWithDifferentTagsAndTwoTagSet() { .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(1)); - assertThat(discoveryNodes.get(0).getId(), is("#cloud-test2-0")); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(1)); } public void testNodesWithSameTagsAndNoTagSet() { @@ -154,8 +151,8 @@ public void testNodesWithSameTagsAndNoTagSet() { .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } public void testNodesWithSameTagsAndOneTagSet() { @@ -165,8 +162,8 @@ public void testNodesWithSameTagsAndOneTagSet() { .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } public void testNodesWithSameTagsAndTwoTagsSet() { @@ -176,8 +173,8 @@ public void testNodesWithSameTagsAndTwoTagsSet() { .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); 
+ List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } public void testMultipleZonesAndTwoNodesInSameZone() { @@ -186,8 +183,8 @@ public void testMultipleZonesAndTwoNodesInSameZone() { .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } public void testMultipleZonesAndTwoNodesInDifferentZones() { @@ -196,8 +193,8 @@ public void testMultipleZonesAndTwoNodesInDifferentZones() { .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(2)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); } /** @@ -209,8 +206,8 @@ public void testZeroNode43() { .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(0)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(0)); } public void testIllegalSettingsMissingAllRequired() { @@ -261,7 +258,7 @@ public void testNoRegionReturnsEmptyList() { .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a") .build(); mock = new GceInstancesServiceMock(nodeSettings); - List discoveryNodes = buildDynamicNodes(mock, nodeSettings); - assertThat(discoveryNodes, hasSize(1)); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(1)); } } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java index 9ff3215cd6480..d719f9d123b8c 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java @@ -19,7 +19,7 @@ package org.elasticsearch.discovery.zen; -import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.transport.TransportAddress; import java.util.List; @@ -31,5 +31,5 @@ public interface UnicastHostsProvider { /** * Builds the dynamic list of unicast hosts to be used for unicast discovery. 
*/ - List buildDynamicNodes(); + List buildDynamicHosts(); } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index e9ac1deec0ab4..cbadbb4a1e09b 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -118,9 +118,6 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { private final AtomicInteger pingingRoundIdGenerator = new AtomicInteger(); - // used as a node id prefix for configured unicast host nodes/address - private static final String UNICAST_NODE_PREFIX = "#zen_unicast_"; - private final Map activePingingRounds = newConcurrentMap(); // a list of temporal responses a node will return for a request (holds responses from other nodes) @@ -184,23 +181,20 @@ public UnicastZenPing(Settings settings, ThreadPool threadPool, TransportService * @param hosts the hosts to resolve * @param limitPortCounts the number of ports to resolve (should be 1 for non-local transport) * @param transportService the transport service - * @param nodeId_prefix a prefix to use for node ids * @param resolveTimeout the timeout before returning from hostname lookups - * @return a list of discovery nodes with resolved transport addresses + * @return a list of resolved transport addresses */ - public static List resolveHostsLists( + public static List resolveHostsLists( final ExecutorService executorService, final Logger logger, final List hosts, final int limitPortCounts, final TransportService transportService, - final String nodeId_prefix, final TimeValue resolveTimeout) throws InterruptedException { Objects.requireNonNull(executorService); Objects.requireNonNull(logger); Objects.requireNonNull(hosts); Objects.requireNonNull(transportService); - Objects.requireNonNull(nodeId_prefix); Objects.requireNonNull(resolveTimeout); if (resolveTimeout.nanos() < 0) { throw new IllegalArgumentException("resolve timeout must be non-negative but was [" + resolveTimeout + "]"); @@ -213,7 +207,7 @@ public static List resolveHostsLists( .collect(Collectors.toList()); final List> futures = executorService.invokeAll(callables, resolveTimeout.nanos(), TimeUnit.NANOSECONDS); - final List discoveryNodes = new ArrayList<>(); + final List transportAddresses = new ArrayList<>(); final Set localAddresses = new HashSet<>(); localAddresses.add(transportService.boundAddress().publishAddress()); localAddresses.addAll(Arrays.asList(transportService.boundAddress().boundAddresses())); @@ -231,13 +225,7 @@ public static List resolveHostsLists( final TransportAddress address = addresses[addressId]; // no point in pinging ourselves if (localAddresses.contains(address) == false) { - discoveryNodes.add( - new DiscoveryNode( - nodeId_prefix + hostname + "_" + addressId + "#", - address, - emptyMap(), - emptySet(), - Version.CURRENT.minimumCompatibilityVersion())); + transportAddresses.add(address); } } } catch (final ExecutionException e) { @@ -249,7 +237,7 @@ public static List resolveHostsLists( logger.warn("timed out after [{}] resolving host [{}]", resolveTimeout, hostname); } } - return discoveryNodes; + return Collections.unmodifiableList(transportAddresses); } @Override @@ -292,29 +280,28 @@ public void ping(final Consumer resultsConsumer, final TimeValue protected void ping(final Consumer resultsConsumer, final TimeValue scheduleDuration, final TimeValue requestDuration) { - final List seedNodes; + 
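With the interface trimmed to a single address-returning method, a host provider no longer needs to fabricate DiscoveryNode instances with synthetic ids. A minimal sketch of a provider against the reworked contract, assuming the UnicastHostsProvider and TransportAddress signatures shown in the hunks above (StaticUnicastHostsProvider and its fixed address list are illustrative only):

import java.util.Arrays;
import java.util.List;

import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.discovery.zen.UnicastHostsProvider;

// Illustrative only: a fixed-address provider under the address-based contract.
class StaticUnicastHostsProvider implements UnicastHostsProvider {

    private final List<TransportAddress> addresses;

    StaticUnicastHostsProvider(TransportAddress... addresses) {
        this.addresses = Arrays.asList(addresses);
    }

    @Override
    public List<TransportAddress> buildDynamicHosts() {
        // No DiscoveryNode wrapping any more: callers resolve nodes from addresses when pinging.
        return addresses;
    }
}

Callers such as UnicastZenPing now resolve a DiscoveryNode from each address only when a ping actually has to be sent, as the sendPings changes further on in this patch show.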
final List seedAddresses = new ArrayList<>(); try { - seedNodes = resolveHostsLists( + seedAddresses.addAll(resolveHostsLists( unicastZenPingExecutorService, logger, configuredHosts, limitPortCounts, transportService, - UNICAST_NODE_PREFIX, - resolveTimeout); + resolveTimeout)); } catch (InterruptedException e) { throw new RuntimeException(e); } - seedNodes.addAll(hostsProvider.buildDynamicNodes()); + seedAddresses.addAll(hostsProvider.buildDynamicHosts()); final DiscoveryNodes nodes = contextProvider.clusterState().nodes(); // add all possible master nodes that were active in the last known cluster configuration for (ObjectCursor masterNode : nodes.getMasterNodes().values()) { - seedNodes.add(masterNode.value); + seedAddresses.add(masterNode.value.getAddress()); } final ConnectionProfile connectionProfile = ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, requestDuration, requestDuration); - final PingingRound pingingRound = new PingingRound(pingingRoundIdGenerator.incrementAndGet(), seedNodes, resultsConsumer, + final PingingRound pingingRound = new PingingRound(pingingRoundIdGenerator.incrementAndGet(), seedAddresses, resultsConsumer, nodes.getLocalNode(), connectionProfile); activePingingRounds.put(pingingRound.id(), pingingRound); final AbstractRunnable pingSender = new AbstractRunnable() { @@ -356,17 +343,17 @@ protected class PingingRound implements Releasable { private final Map tempConnections = new HashMap<>(); private final KeyedLock connectionLock = new KeyedLock<>(true); private final PingCollection pingCollection; - private final List seedNodes; + private final List seedAddresses; private final Consumer pingListener; private final DiscoveryNode localNode; private final ConnectionProfile connectionProfile; private AtomicBoolean closed = new AtomicBoolean(false); - PingingRound(int id, List seedNodes, Consumer resultsConsumer, DiscoveryNode localNode, + PingingRound(int id, List seedAddresses, Consumer resultsConsumer, DiscoveryNode localNode, ConnectionProfile connectionProfile) { this.id = id; - this.seedNodes = Collections.unmodifiableList(new ArrayList<>(seedNodes)); + this.seedAddresses = Collections.unmodifiableList(seedAddresses.stream().distinct().collect(Collectors.toList())); this.pingListener = resultsConsumer; this.localNode = localNode; this.connectionProfile = connectionProfile; @@ -381,9 +368,9 @@ public boolean isClosed() { return this.closed.get(); } - public List getSeedNodes() { + public List getSeedAddresses() { ensureOpen(); - return seedNodes; + return seedAddresses; } public Connection getOrConnect(DiscoveryNode node) throws IOException { @@ -457,26 +444,28 @@ protected void sendPings(final TimeValue timeout, final PingingRound pingingRoun final ClusterState lastState = contextProvider.clusterState(); final UnicastPingRequest pingRequest = new UnicastPingRequest(pingingRound.id(), timeout, createPingResponse(lastState)); - Set nodesFromResponses = temporalResponses.stream().map(pingResponse -> { + List temporalAddresses = temporalResponses.stream().map(pingResponse -> { assert clusterName.equals(pingResponse.clusterName()) : "got a ping request from a different cluster. 
expected " + clusterName + " got " + pingResponse.clusterName(); - return pingResponse.node(); - }).collect(Collectors.toSet()); - - // dedup by address - final Map uniqueNodesByAddress = - Stream.concat(pingingRound.getSeedNodes().stream(), nodesFromResponses.stream()) - .collect(Collectors.toMap(DiscoveryNode::getAddress, Function.identity(), (n1, n2) -> n1)); + return pingResponse.node().getAddress(); + }).collect(Collectors.toList()); + final Stream uniqueAddresses = Stream.concat(pingingRound.getSeedAddresses().stream(), + temporalAddresses.stream()).distinct(); // resolve what we can via the latest cluster state - final Set nodesToPing = uniqueNodesByAddress.values().stream() - .map(node -> { - DiscoveryNode foundNode = lastState.nodes().findByAddress(node.getAddress()); - if (foundNode == null) { - return node; - } else { + final Set nodesToPing = uniqueAddresses + .map(address -> { + DiscoveryNode foundNode = lastState.nodes().findByAddress(address); + if (foundNode != null && transportService.nodeConnected(foundNode)) { return foundNode; + } else { + return new DiscoveryNode( + address.toString(), + address, + emptyMap(), + emptySet(), + Version.CURRENT.minimumCompatibilityVersion()); } }).collect(Collectors.toSet()); diff --git a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java index fdc36152cc895..33c87ea7f383e 100644 --- a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java @@ -84,7 +84,7 @@ public void testDoesNotRespondToZenPings() throws Exception { internalCluster().getInstance(TransportService.class); // try to ping the single node directly final UnicastHostsProvider provider = - () -> Collections.singletonList(nodeTransport.getLocalNode()); + () -> Collections.singletonList(nodeTransport.getLocalNode().getAddress()); final CountDownLatch latch = new CountDownLatch(1); final DiscoveryNodes nodes = DiscoveryNodes.builder() .add(nodeTransport.getLocalNode()) diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index f71ffe28b50f6..4aa75077431e7 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -408,19 +408,18 @@ public BoundTransportAddress boundAddress() { Collections.emptySet()); closeables.push(transportService); final int limitPortCounts = randomIntBetween(1, 10); - final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = TestUnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList("127.0.0.1"), limitPortCounts, transportService, - "test_", TimeValue.timeValueSeconds(1)); - assertThat(discoveryNodes, hasSize(limitPortCounts)); + assertThat(transportAddresses, hasSize(limitPortCounts)); final Set ports = new HashSet<>(); - for (final DiscoveryNode discoveryNode : discoveryNodes) { - assertTrue(discoveryNode.getAddress().address().getAddress().isLoopbackAddress()); - ports.add(discoveryNode.getAddress().getPort()); + for (final TransportAddress address : transportAddresses) { + assertTrue(address.address().getAddress().isLoopbackAddress()); + ports.add(address.getPort()); } assertThat(ports, equalTo(IntStream.range(9300, 9300 + 
limitPortCounts).mapToObj(m -> m).collect(Collectors.toSet()))); } @@ -453,19 +452,18 @@ public BoundTransportAddress boundAddress() { new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = TestUnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList(NetworkAddress.format(loopbackAddress)), 10, transportService, - "test_", TimeValue.timeValueSeconds(1)); - assertThat(discoveryNodes, hasSize(7)); + assertThat(transportAddresses, hasSize(7)); final Set ports = new HashSet<>(); - for (final DiscoveryNode discoveryNode : discoveryNodes) { - assertTrue(discoveryNode.getAddress().address().getAddress().isLoopbackAddress()); - ports.add(discoveryNode.getAddress().getPort()); + for (final TransportAddress address : transportAddresses) { + assertTrue(address.address().getAddress().isLoopbackAddress()); + ports.add(address.getPort()); } assertThat(ports, equalTo(IntStream.range(9303, 9310).mapToObj(m -> m).collect(Collectors.toSet()))); } @@ -505,17 +503,16 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi Collections.emptySet()); closeables.push(transportService); - final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = TestUnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList(hostname), 1, transportService, - "test_", TimeValue.timeValueSeconds(1) ); - assertThat(discoveryNodes, empty()); + assertThat(transportAddresses, empty()); verify(logger).warn("failed to resolve host [" + hostname + "]", unknownHostException); } @@ -565,16 +562,15 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi closeables.push(transportService); final TimeValue resolveTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 3)); try { - final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = TestUnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("hostname1", "hostname2"), 1, transportService, - "test+", resolveTimeout); - assertThat(discoveryNodes, hasSize(1)); + assertThat(transportAddresses, hasSize(1)); verify(logger).trace( "resolved host [{}] to {}", "hostname1", new TransportAddress[]{new TransportAddress(TransportAddress.META_ADDRESS, 9300)}); @@ -732,17 +728,16 @@ public BoundTransportAddress boundAddress() { new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = TestUnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("127.0.0.1:9300:9300", "127.0.0.1:9301"), 1, transportService, - "test_", TimeValue.timeValueSeconds(1)); - assertThat(discoveryNodes, hasSize(1)); // only one of the two is valid and will be used - assertThat(discoveryNodes.get(0).getAddress().getAddress(), equalTo("127.0.0.1")); - assertThat(discoveryNodes.get(0).getAddress().getPort(), equalTo(9301)); + assertThat(transportAddresses, hasSize(1)); // only one of the two is valid and will be used + assertThat(transportAddresses.get(0).getAddress(), equalTo("127.0.0.1")); + assertThat(transportAddresses.get(0).getPort(), equalTo(9301)); 
verify(logger).warn(eq("failed to resolve host [127.0.0.1:9300:9300]"), Matchers.any(ExecutionException.class)); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java index 46bbdcc7646c4..2e60a3c518dd3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.discovery.zen.UnicastHostsProvider; @@ -55,7 +56,7 @@ public MockUncasedHostProvider(Supplier localNodeSupplier, Cluste } @Override - public List buildDynamicNodes() { + public List buildDynamicHosts() { final DiscoveryNode localNode = getNode(); assert localNode != null; synchronized (activeNodesPerCluster) { @@ -64,6 +65,7 @@ public List buildDynamicNodes() { .map(MockUncasedHostProvider::getNode) .filter(Objects::nonNull) .filter(n -> localNode.equals(n) == false) + .map(DiscoveryNode::getAddress) .collect(Collectors.toList()); } } From 0a324b9943758bb976f93c99031ce517507348f0 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 21 Jun 2018 07:59:55 -0700 Subject: [PATCH 05/34] Core: Convert TransportAction.execute uses to client calls (#31487) This commit converts some of the existing calls to TransportAction.execute to use the equivalent client method for the desired action. --- .../TransportMultiSearchTemplateAction.java | 10 ++-- .../TransportSearchTemplateAction.java | 14 +++--- .../indices/create/CreateIndexResponse.java | 3 +- .../upgrade/post/TransportUpgradeAction.java | 10 ++-- .../action/bulk/TransportBulkAction.java | 19 +++---- .../ingest/PutPipelineTransportAction.java | 10 ++-- .../search/TransportMultiSearchAction.java | 16 +++--- .../action/update/TransportUpdateAction.java | 22 ++++---- .../tasks/TaskResultsService.java | 9 +--- .../bulk/TransportBulkActionTookTests.java | 50 ++++++------------- .../search/MultiSearchActionTookTests.java | 16 +++--- .../TransportMultiSearchActionTests.java | 12 ++--- .../action/TransportGraphExploreAction.java | 12 ++--- 13 files changed, 83 insertions(+), 120 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java index 3e80b0f247883..7451c89cdb494 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -23,9 +23,9 @@ import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.TransportMultiSearchAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; 
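The conversion in this patch follows one pattern: instead of injecting a concrete Transport*Action and calling execute on it, the action holds a NodeClient and goes through the matching client method (or executeLocally where no convenience method exists). A minimal sketch of that shape, assuming only the client API visible in these hunks (SearchForwarder and forward are illustrative names):

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.node.NodeClient;

// Illustrative only: hold the client facade rather than a concrete transport action.
class SearchForwarder {

    private final NodeClient client; // injected where TransportSearchAction used to be

    SearchForwarder(NodeClient client) {
        this.client = client;
    }

    void forward(SearchRequest request, ActionListener<SearchResponse> listener) {
        // Equivalent to the old searchAction.execute(request, listener); the client looks up
        // the registered transport action for the request type.
        client.search(request, listener);
    }
}

The tests later in this patch stub the same seam by subclassing NodeClient and overriding doExecute or search.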
import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -42,16 +42,16 @@ public class TransportMultiSearchTemplateAction extends HandledTransportAction { + client.multiSearch(multiSearchRequest, ActionListener.wrap(r -> { for (int i = 0; i < r.getResponses().length; i++) { MultiSearchResponse.Item item = r.getResponses()[i]; int originalSlot = originalSlots.get(i); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index a910ec384ee12..c241678cc5f44 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -22,9 +22,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -50,20 +50,18 @@ public class TransportSearchTemplateAction extends HandledTransportAction) SearchTemplateRequest::new); this.scriptService = scriptService; - this.searchAction = searchAction; this.xContentRegistry = xContentRegistry; + this.client = client; } @Override @@ -72,7 +70,7 @@ protected void doExecute(SearchTemplateRequest request, ActionListener() { + client.search(searchRequest, new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { try { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java index 4e3a5685bda13..c858d0bb10651 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java @@ -55,8 +55,7 @@ protected static void declareFields(Constructing private String index; - protected CreateIndexResponse() { - } + public CreateIndexResponse() {} protected CreateIndexResponse(boolean acknowledged, boolean shardsAcknowledged, String index) { super(acknowledged, shardsAcknowledged); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index 67e51c8e5575c..0bc2134cb505a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import 
org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -58,16 +59,15 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction { private final IndicesService indicesService; - - private final TransportUpgradeSettingsAction upgradeSettingsAction; + private final NodeClient client; @Inject public TransportUpgradeAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, TransportUpgradeSettingsAction upgradeSettingsAction) { + IndexNameExpressionResolver indexNameExpressionResolver, NodeClient client) { super(settings, UpgradeAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, UpgradeRequest::new, ThreadPool.Names.FORCE_MERGE); this.indicesService = indicesService; - this.upgradeSettingsAction = upgradeSettingsAction; + this.client = client; } @Override @@ -205,7 +205,7 @@ public void onFailure(Exception e) { private void updateSettings(final UpgradeResponse upgradeResponse, final ActionListener listener) { UpgradeSettingsRequest upgradeSettingsRequest = new UpgradeSettingsRequest(upgradeResponse.versions()); - upgradeSettingsAction.execute(upgradeSettingsRequest, new ActionListener() { + client.executeLocally(UpgradeSettingsAction.INSTANCE, upgradeSettingsRequest, new ActionListener() { @Override public void onResponse(UpgradeSettingsResponse updateSettingsResponse) { listener.onResponse(upgradeResponse); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 2fcf30b3ae8a9..153a7d8d45a7b 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.IngestActionForwarder; import org.elasticsearch.action.support.ActionFilters; @@ -38,6 +37,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.update.TransportUpdateAction; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -88,27 +88,24 @@ public class TransportBulkAction extends HandledTransportAction responses, int idx, DocWriteRequest request, String index, Exception e) { diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 7dde981804939..17af73c167704 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -23,9 +23,9 @@ import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; 
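Admin-level actions move to the same facade: cluster-level calls go through client.admin().cluster() and index-level calls through client.admin().indices(), as other hunks in this patch show. A minimal sketch of the cluster-admin path (IngestInfoFetcher is an illustrative name; the request is left unconfigured to keep the example small):

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.client.node.NodeClient;

// Illustrative only: cluster-admin actions are reached through the client facade as well.
class IngestInfoFetcher {

    private final NodeClient client;

    IngestInfoFetcher(NodeClient client) {
        this.client = client;
    }

    void fetch(ActionListener<NodesInfoResponse> listener) {
        // Replaces a direct dependency on TransportNodesInfoAction.
        client.admin().cluster().nodesInfo(new NodesInfoRequest(), listener);
    }
}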
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -47,16 +47,16 @@ public class PutPipelineTransportAction extends TransportMasterNodeAction() { + client.admin().cluster().nodesInfo(nodesInfoRequest, new ActionListener() { @Override public void onResponse(NodesInfoResponse nodeInfos) { try { diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index 89367f71ef38b..ce35c1e94f83a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.service.ClusterService; @@ -43,27 +43,27 @@ public class TransportMultiSearchAction extends HandledTransportAction searchAction; private final LongSupplier relativeTimeProvider; + private final NodeClient client; @Inject public TransportMultiSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ClusterService clusterService, TransportSearchAction searchAction, ActionFilters actionFilters) { + ClusterService clusterService, ActionFilters actionFilters, NodeClient client) { super(settings, MultiSearchAction.NAME, threadPool, transportService, actionFilters, MultiSearchRequest::new); this.clusterService = clusterService; - this.searchAction = searchAction; this.availableProcessors = EsExecutors.numberOfProcessors(settings); this.relativeTimeProvider = System::nanoTime; + this.client = client; } TransportMultiSearchAction(ThreadPool threadPool, ActionFilters actionFilters, TransportService transportService, - ClusterService clusterService, TransportAction searchAction, - int availableProcessors, LongSupplier relativeTimeProvider) { + ClusterService clusterService, int availableProcessors, + LongSupplier relativeTimeProvider, NodeClient client) { super(Settings.EMPTY, MultiSearchAction.NAME, threadPool, transportService, actionFilters, MultiSearchRequest::new); this.clusterService = clusterService; - this.searchAction = searchAction; this.availableProcessors = availableProcessors; this.relativeTimeProvider = relativeTimeProvider; + this.client = client; } @Override @@ -141,7 +141,7 @@ void executeSearch( * when we handle the response rather than going recursive, we fork to another thread, otherwise we recurse. 
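The fork-versus-recurse comment above is easy to miss: if a search completes on the thread that submitted it, draining the queue by direct recursion would grow the stack with every synchronous completion, so the next step is handed to an executor instead. A small self-contained sketch of that thread-identity check, with made-up names (RecursionGuard, drain, runAsyncThenCallback):

import java.util.Queue;
import java.util.concurrent.Executor;

// Illustrative only: fork when the callback fires on the calling thread, otherwise recurse.
final class RecursionGuard {

    private final Executor executor;

    RecursionGuard(Executor executor) {
        this.executor = executor;
    }

    void drain(Queue<Runnable> work) {
        Runnable next = work.poll();
        if (next == null) {
            return;
        }
        final Thread caller = Thread.currentThread();
        runAsyncThenCallback(next, () -> {
            if (Thread.currentThread() == caller) {
                // completed inline; fork before continuing so the stack stays flat
                executor.execute(() -> drain(work));
            } else {
                // already on another thread, safe to recurse directly
                drain(work);
            }
        });
    }

    // Placeholder for an operation that may complete either inline or on another thread.
    private void runAsyncThenCallback(Runnable task, Runnable callback) {
        task.run();
        callback.run();
    }
}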
*/ final Thread thread = Thread.currentThread(); - searchAction.execute(request.request, new ActionListener() { + client.search(request.request, new ActionListener() { @Override public void onResponse(final SearchResponse searchResponse) { handleResponse(request.responseSlot, new MultiSearchResponse.Item(searchResponse, null)); diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 91911129dfac7..9faf22d464cbb 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -24,8 +24,6 @@ import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; -import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; @@ -34,6 +32,7 @@ import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.instance.TransportInstanceSingleOperationAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; @@ -66,22 +65,21 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationAction { - private final TransportBulkAction bulkAction; private final AutoCreateIndex autoCreateIndex; - private final TransportCreateIndexAction createIndexAction; private final UpdateHelper updateHelper; private final IndicesService indicesService; + private final NodeClient client; @Inject public TransportUpdateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, - TransportBulkAction bulkAction, TransportCreateIndexAction createIndexAction, UpdateHelper updateHelper, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService, AutoCreateIndex autoCreateIndex) { + UpdateHelper updateHelper, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService, + AutoCreateIndex autoCreateIndex, NodeClient client) { super(settings, UpdateAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, UpdateRequest::new); - this.bulkAction = bulkAction; - this.createIndexAction = createIndexAction; this.updateHelper = updateHelper; this.indicesService = indicesService; this.autoCreateIndex = autoCreateIndex; + this.client = client; } @Override @@ -116,7 +114,7 @@ public static void resolveAndValidateRouting(MetaData metaData, String concreteI protected void doExecute(final UpdateRequest request, final ActionListener listener) { // if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) { - createIndexAction.execute(new CreateIndexRequest().index(request.index()).cause("auto(update 
api)").masterNodeTimeout(request.timeout()), new ActionListener() { + client.admin().indices().create(new CreateIndexRequest().index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener() { @Override public void onResponse(CreateIndexResponse result) { innerExecute(request, listener); @@ -177,7 +175,7 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< IndexRequest upsertRequest = result.action(); // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request final BytesReference upsertSourceBytes = upsertRequest.source(); - bulkAction.execute(toSingleItemBulkRequest(upsertRequest), wrapBulkResponse( + client.bulk(toSingleItemBulkRequest(upsertRequest), wrapBulkResponse( ActionListener.wrap(response -> { UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult()); if (request.fetchSource() != null && request.fetchSource().fetchSource()) { @@ -197,7 +195,7 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< IndexRequest indexRequest = result.action(); // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request final BytesReference indexSourceBytes = indexRequest.source(); - bulkAction.execute(toSingleItemBulkRequest(indexRequest), wrapBulkResponse( + client.bulk(toSingleItemBulkRequest(indexRequest), wrapBulkResponse( ActionListener.wrap(response -> { UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult()); update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes)); @@ -208,7 +206,7 @@ protected void shardOperation(final UpdateRequest request, final ActionListener< break; case DELETED: DeleteRequest deleteRequest = result.action(); - bulkAction.execute(toSingleItemBulkRequest(deleteRequest), wrapBulkResponse( + client.bulk(toSingleItemBulkRequest(deleteRequest), wrapBulkResponse( ActionListener.wrap(response -> { UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult()); update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null)); diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java index 6ec949a0c918b..b94902132fea2 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import 
org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; @@ -69,15 +68,11 @@ public class TaskResultsService extends AbstractComponent { private final ClusterService clusterService; - private final TransportCreateIndexAction createIndexAction; - @Inject - public TaskResultsService(Settings settings, Client client, ClusterService clusterService, - TransportCreateIndexAction createIndexAction) { + public TaskResultsService(Settings settings, Client client, ClusterService clusterService) { super(settings); this.client = client; this.clusterService = clusterService; - this.createIndexAction = createIndexAction; } public void storeResult(TaskResult taskResult, ActionListener listener) { @@ -91,7 +86,7 @@ public void storeResult(TaskResult taskResult, ActionListener listener) { createIndexRequest.mapping(TASK_TYPE, taskResultIndexMapping(), XContentType.JSON); createIndexRequest.cause("auto(task api)"); - createIndexAction.execute(null, createIndexRequest, new ActionListener() { + client.admin().indices().create(createIndexRequest, new ActionListener() { @Override public void onResponse(CreateIndexResponse result) { doStoreResult(taskResult, listener); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index af8289f0c45b1..9d5193180299d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -21,16 +21,17 @@ package org.elasticsearch.action.bulk; import org.apache.lucene.util.Constants; +import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -99,14 +100,13 @@ private TransportBulkAction createAction(boolean controlled, AtomicLong expected IndexNameExpressionResolver resolver = new Resolver(Settings.EMPTY); ActionFilters actionFilters = new ActionFilters(new HashSet<>()); - TransportCreateIndexAction createIndexAction = new TransportCreateIndexAction( - Settings.EMPTY, - transportService, - clusterService, - threadPool, - null, - actionFilters, - resolver); + NodeClient client = new NodeClient(Settings.EMPTY, threadPool) { + @Override + public + void doExecute(Action action, Request request, ActionListener listener) { + listener.onResponse((Response)new CreateIndexResponse()); + } + }; if (controlled) { @@ -116,7 +116,7 @@ private TransportBulkAction createAction(boolean controlled, AtomicLong expected transportService, clusterService, null, - createIndexAction, + client, actionFilters, 
resolver, null, @@ -141,7 +141,7 @@ void executeBulk( transportService, clusterService, null, - createIndexAction, + client, actionFilters, resolver, null, @@ -223,7 +223,7 @@ static class TestTransportBulkAction extends TransportBulkAction { TransportService transportService, ClusterService clusterService, TransportShardBulkAction shardBulkAction, - TransportCreateIndexAction createIndexAction, + NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, AutoCreateIndex autoCreateIndex, @@ -235,7 +235,7 @@ static class TestTransportBulkAction extends TransportBulkAction { clusterService, null, shardBulkAction, - createIndexAction, + client, actionFilters, indexNameExpressionResolver, autoCreateIndex, @@ -253,24 +253,4 @@ boolean shouldAutoCreate(String index, ClusterState state) { } } - - static class TestTransportCreateIndexAction extends TransportCreateIndexAction { - - TestTransportCreateIndexAction( - Settings settings, - TransportService transportService, - ClusterService clusterService, - ThreadPool threadPool, - MetaDataCreateIndexService createIndexService, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, transportService, clusterService, threadPool, createIndexService, actionFilters, indexNameExpressionResolver); - } - - @Override - protected void doExecute(Task task, CreateIndexRequest request, ActionListener listener) { - listener.onResponse(newResponse()); - } - } - } diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index 39e9ec805e070..94bc6b01ec168 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -148,10 +148,9 @@ public TaskManager getTaskManager() { final ExecutorService commonExecutor = threadPool.executor(threadPoolNames.get(0)); final Set requests = Collections.newSetFromMap(Collections.synchronizedMap(new IdentityHashMap<>())); - TransportAction searchAction = new TransportAction(Settings.EMPTY, - "action", threadPool, actionFilters, taskManager) { + NodeClient client = new NodeClient(settings, threadPool) { @Override - protected void doExecute(SearchRequest request, ActionListener listener) { + public void search(final SearchRequest request, final ActionListener listener) { requests.add(request); commonExecutor.execute(() -> { counter.decrementAndGet(); @@ -161,8 +160,8 @@ protected void doExecute(SearchRequest request, ActionListener l }; if (controlledClock) { - return new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, - availableProcessors, expected::get) { + return new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, availableProcessors, + expected::get, client) { @Override void executeSearch(final Queue requests, final AtomicArray responses, final 
AtomicInteger responseCounter, final ActionListener listener, long startTimeInNanos) { @@ -171,9 +170,8 @@ void executeSearch(final Queue requests, final AtomicArray requests, final AtomicArray responses, final AtomicInteger responseCounter, final ActionListener listener, long startTimeInNanos) { diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java index 26d5cf2cc14be..a43584a4130e4 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionTestUtils; -import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -107,15 +107,14 @@ public TaskManager getTaskManager() { final ExecutorService commonExecutor = threadPool.executor(threadPoolNames.get(0)); final ExecutorService rarelyExecutor = threadPool.executor(threadPoolNames.get(1)); final Set requests = Collections.newSetFromMap(Collections.synchronizedMap(new IdentityHashMap<>())); - TransportAction searchAction = new TransportAction - (Settings.EMPTY, "action", threadPool, actionFilters, taskManager) { + NodeClient client = new NodeClient(settings, threadPool) { @Override - protected void doExecute(SearchRequest request, ActionListener listener) { + public void search(final SearchRequest request, final ActionListener listener) { requests.add(request); int currentConcurrentSearches = counter.incrementAndGet(); if (currentConcurrentSearches > maxAllowedConcurrentSearches) { errorHolder.set(new AssertionError("Current concurrent search [" + currentConcurrentSearches + - "] is higher than is allowed [" + maxAllowedConcurrentSearches + "]")); + "] is higher than is allowed [" + maxAllowedConcurrentSearches + "]")); } final ExecutorService executorService = rarely() ? 
rarelyExecutor : commonExecutor; executorService.execute(() -> { @@ -126,8 +125,7 @@ protected void doExecute(SearchRequest request, ActionListener l }; TransportMultiSearchAction action = - new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, searchAction, 10, - System::nanoTime); + new TransportMultiSearchAction(threadPool, actionFilters, transportService, clusterService, 10, System::nanoTime, client); // Execute the multi search api and fail if we find an error after executing: try { diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 07035967d2abf..1c1dfb476da7d 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -13,9 +13,9 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -65,7 +65,7 @@ */ public class TransportGraphExploreAction extends HandledTransportAction { - private final TransportSearchAction searchAction; + private final NodeClient client; protected final XPackLicenseState licenseState; static class VertexPriorityQueue extends PriorityQueue { @@ -82,12 +82,12 @@ protected boolean lessThan(Vertex a, Vertex b) { } @Inject - public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, TransportSearchAction transportSearchAction, + public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, NodeClient client, TransportService transportService, ActionFilters actionFilters, XPackLicenseState licenseState) { super(settings, GraphExploreAction.NAME, threadPool, transportService, actionFilters, (Supplier)GraphExploreRequest::new); - this.searchAction = transportSearchAction; + this.client = client; this.licenseState = licenseState; } @@ -313,7 +313,7 @@ synchronized void expand() { // System.out.println(source); logger.trace("executing expansion graph search request"); - searchAction.execute(searchRequest, new ActionListener() { + client.search(searchRequest, new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { // System.out.println(searchResponse); @@ -660,7 +660,7 @@ public synchronized void start() { searchRequest.source(source); // System.out.println(source); logger.trace("executing initial graph search request"); - searchAction.execute(searchRequest, new ActionListener() { + client.search(searchRequest, new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { addShardFailures(searchResponse.getShardFailures()); From 68ec9588737d97c2a9282308fce10ac6f3cf03c7 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 21 Jun 2018 08:19:23 -0700 Subject: [PATCH 06/34] [DOCS] Move migration APIs to docs (#31473) --- .../reference/migration/apis}/assistance.asciidoc | 1 + 
.../reference/migration/apis}/deprecation.asciidoc | 1 + .../reference/migration/apis}/upgrade.asciidoc | 1 + .../reference/migration}/migration.asciidoc | 7 ++++--- docs/reference/rest-api/index.asciidoc | 2 +- 5 files changed, 8 insertions(+), 4 deletions(-) rename {x-pack/docs/en/rest-api/migration => docs/reference/migration/apis}/assistance.asciidoc (99%) rename {x-pack/docs/en/rest-api/migration => docs/reference/migration/apis}/deprecation.asciidoc (99%) rename {x-pack/docs/en/rest-api/migration => docs/reference/migration/apis}/upgrade.asciidoc (99%) rename {x-pack/docs/en/rest-api => docs/reference/migration}/migration.asciidoc (64%) diff --git a/x-pack/docs/en/rest-api/migration/assistance.asciidoc b/docs/reference/migration/apis/assistance.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/migration/assistance.asciidoc rename to docs/reference/migration/apis/assistance.asciidoc index 1af625a97ecff..ae9972cc062bc 100644 --- a/x-pack/docs/en/rest-api/migration/assistance.asciidoc +++ b/docs/reference/migration/apis/assistance.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[migration-api-assistance]] === Migration Assistance API diff --git a/x-pack/docs/en/rest-api/migration/deprecation.asciidoc b/docs/reference/migration/apis/deprecation.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/migration/deprecation.asciidoc rename to docs/reference/migration/apis/deprecation.asciidoc index 54feee7903af8..a1f0517b82757 100644 --- a/x-pack/docs/en/rest-api/migration/deprecation.asciidoc +++ b/docs/reference/migration/apis/deprecation.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[migration-api-deprecation]] === Deprecation Info APIs diff --git a/x-pack/docs/en/rest-api/migration/upgrade.asciidoc b/docs/reference/migration/apis/upgrade.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/migration/upgrade.asciidoc rename to docs/reference/migration/apis/upgrade.asciidoc index 839a0057e82fe..39a5638cce111 100644 --- a/x-pack/docs/en/rest-api/migration/upgrade.asciidoc +++ b/docs/reference/migration/apis/upgrade.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[migration-api-upgrade]] === Migration Upgrade API diff --git a/x-pack/docs/en/rest-api/migration.asciidoc b/docs/reference/migration/migration.asciidoc similarity index 64% rename from x-pack/docs/en/rest-api/migration.asciidoc rename to docs/reference/migration/migration.asciidoc index 51f1e5fae0f65..a54da21ab1409 100644 --- a/x-pack/docs/en/rest-api/migration.asciidoc +++ b/docs/reference/migration/migration.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[migration-api]] == Migration APIs @@ -8,6 +9,6 @@ The migration APIs simplify upgrading {xpack} indices from one version to anothe * <> * <> -include::migration/assistance.asciidoc[] -include::migration/upgrade.asciidoc[] -include::migration/deprecation.asciidoc[] +include::apis/assistance.asciidoc[] +include::apis/upgrade.asciidoc[] +include::apis/deprecation.asciidoc[] diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index e44eea9aa53f4..9ec57940dd299 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -21,7 +21,7 @@ directly to configure and access {xpack} features. 
include::info.asciidoc[] include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] -include::{xes-repo-dir}/rest-api/migration.asciidoc[] +include::{es-repo-dir}/migration/migration.asciidoc[] include::{xes-repo-dir}/rest-api/ml-api.asciidoc[] include::{xes-repo-dir}/rest-api/rollup-api.asciidoc[] include::{xes-repo-dir}/rest-api/security.asciidoc[] From 872418ff9499011bf6ee8da473fc2c49bbc67a43 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Wed, 20 Jun 2018 21:40:13 +0300 Subject: [PATCH 07/34] [DOCS] Significantly improve SQL docs Introduce SQL commands Move reserved keywords into an appendix Add section on security Introduce concepts section --- x-pack/docs/en/sql/appendix/index.asciidoc | 1 + .../syntax-reserved.asciidoc} | 5 +- x-pack/docs/en/sql/concepts.asciidoc | 63 ++++ x-pack/docs/en/sql/endpoints/cli.asciidoc | 16 - x-pack/docs/en/sql/endpoints/jdbc.asciidoc | 29 +- x-pack/docs/en/sql/endpoints/rest.asciidoc | 14 - .../docs/en/sql/endpoints/translate.asciidoc | 16 - x-pack/docs/en/sql/functions/index.asciidoc | 19 ++ x-pack/docs/en/sql/index.asciidoc | 14 +- .../docs/en/sql/language/data-types.asciidoc | 4 +- x-pack/docs/en/sql/language/index.asciidoc | 9 +- x-pack/docs/en/sql/language/syntax.asciidoc | 123 -------- .../language/syntax/describe-table.asciidoc | 20 ++ .../en/sql/language/syntax/index.asciidoc | 16 + .../en/sql/language/syntax/select.asciidoc | 284 ++++++++++++++++++ .../sql/language/syntax/show-columns.asciidoc | 12 + .../language/syntax/show-functions.asciidoc | 14 + .../sql/language/syntax/show-tables.asciidoc | 14 + x-pack/docs/en/sql/security.asciidoc | 37 +++ 19 files changed, 506 insertions(+), 204 deletions(-) create mode 100644 x-pack/docs/en/sql/appendix/index.asciidoc rename x-pack/docs/en/sql/{language/reserved.asciidoc => appendix/syntax-reserved.asciidoc} (98%) create mode 100644 x-pack/docs/en/sql/concepts.asciidoc delete mode 100644 x-pack/docs/en/sql/language/syntax.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/describe-table.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/index.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/select.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/show-columns.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/show-functions.asciidoc create mode 100644 x-pack/docs/en/sql/language/syntax/show-tables.asciidoc create mode 100644 x-pack/docs/en/sql/security.asciidoc diff --git a/x-pack/docs/en/sql/appendix/index.asciidoc b/x-pack/docs/en/sql/appendix/index.asciidoc new file mode 100644 index 0000000000000..b00176a8a3f67 --- /dev/null +++ b/x-pack/docs/en/sql/appendix/index.asciidoc @@ -0,0 +1 @@ +include::syntax-reserved.asciidoc[] \ No newline at end of file diff --git a/x-pack/docs/en/sql/language/reserved.asciidoc b/x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc similarity index 98% rename from x-pack/docs/en/sql/language/reserved.asciidoc rename to x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc index 8dc62e90a9eb1..bbdefcbcb54aa 100644 --- a/x-pack/docs/en/sql/language/reserved.asciidoc +++ b/x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc @@ -1,5 +1,6 @@ -[[sql-spec-reserved]] -=== Reserved Keywords +[appendix] +[[sql-syntax-reserved]] += Reserved Keywords Table with reserved keywords that need to be quoted. Also provide an example to make it more obvious. 
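The reserved-keywords appendix above notes that an example should be provided. As a hedged illustration only (not part of the patch), the sketch below shows the kind of quoting the appendix describes; the index name `select`, the host, the port and the JDBC URL are hypothetical placeholders that simply follow the URL format documented in the JDBC section of this same patch.

["source","java"]
----
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Illustrative sketch: identifiers that collide with reserved keywords must be
// double-quoted. The index name "select" and the connection URL are placeholders.
public class ReservedKeywordExample {
    public static void main(String[] args) throws Exception {
        try (Connection con = DriverManager.getConnection("jdbc:es://http://localhost:9200");
             Statement st = con.createStatement();
             // Without the double quotes the parser would see the SELECT keyword twice and fail;
             // quoting turns the keyword into a plain identifier.
             ResultSet rs = st.executeQuery("SELECT * FROM \"select\" LIMIT 5")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}
----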
diff --git a/x-pack/docs/en/sql/concepts.asciidoc b/x-pack/docs/en/sql/concepts.asciidoc new file mode 100644 index 0000000000000..f5eab6f37baf8 --- /dev/null +++ b/x-pack/docs/en/sql/concepts.asciidoc @@ -0,0 +1,63 @@ +[[sql-concepts]] +== Conventions and Terminology + +For clarity, it is important to establish the meaning behind certain words as, the same wording might convey different meanings to different readers depending on one's familiarity with SQL versus {es}. + +NOTE: This documentation while trying to be complete, does assume the reader has _basic_ understanding of {es} and/or SQL. If that is not the case, please continue reading the documentation however take notes and pursue the topics that are unclear either through the main {es} documentation or through the plethora of SQL material available in the open (there are simply too many excellent resources here to enumerate). + +As a general rule, {es-sql} as the name indicates provides a SQL interface to {es}. As such, it follows the SQL terminology and conventions first, whenever possible. However the backing engine itself is {es} for which {es-sql} was purposely created hence why features or concepts that are not available, or cannot be mapped correctly, in SQL appear +in {es-sql}. +Last but not least, {es-sql} tries to obey the https://en.wikipedia.org/wiki/Principle_of_least_astonishment[principle of least suprise], though as all things in the world, everything is relative. + +=== Mapping concepts across SQL and {es} + +While SQL and {es} have different terms for the way the data is organized (and different semantics), essentially their purpose is the same. + +So let's start from the bottom; these roughly are: + +[cols="1,1,5", options="header"] +|=== +|SQL +|{es} +|Description + +|`column` +|`field` +|In both cases, at the lowest level, data is stored in in _named_ entries, of a variety of <>, containing _one_ value. SQL calls such an entry a _column_ while {es} a _field_. +Notice that in {es} a field can contain _multiple_ values of the same type (esentially a list) while in SQL, a _column_ can contain _exactly_ one value of said type. +{es-sql} will do its best to preserve the SQL semantic and, depending on the query, reject those that return fields with more than one value. + +|`row` +|`document` +|++Column++s and ++field++s do _not_ exist by themselves; they are part of a `row` or a `document`. The two have slightly different semantics: a `row` tends to be _strict_ (and have more enforcements) while a `document` tends to be a bit more flexible or loose (while still having a structure). + +|`table` +|`index` +|The target against which queries, whether in SQL or {es} get executed against. + +|`schema` +|_implicit_ +|In RDBMS, `schema` is mainly a namespace of tables and typically used as a security boundary. {es} does not provide an equivalent concept for it. However when security is enabled, {es} automatically applies the security enforcement so that a role sees only the data it is allowed to (in SQL jargon, its _schema_). + +|`catalog` or `database` +|`cluster` instance +|In SQL, `catalog` or `database` are used interchangebly and represent a set of schemas that is, a number of tables. +In {es} the set of indices available are grouped in a `cluster`. The semantics also differ a bit; a `database` is essentially yet another namespace (which can have some implications on the way data is stored) while an {es} `cluster` is a runtime instance, or rather a set of at least one {es} instance (typically running distributed). 
+In practice this means that while in SQL one can potentially have multiple catalogs inside an instance, in {es} one is restricted to only _one_. + +|`cluster` +|`cluster` (federated) +|Traditionally in SQL, _cluster_ refers to a single RDMBS instance which contains a number of ++catalog++s or ++database++s (see above). The same word can be reused inside {es} as well however its semantic clarified a bit. + +While RDBMS tend to have only one running instance, on a single machine (_not_ distributed), {es} goes the opposite way and by default, is distributed and multi-instance. + +Further more, an {es} `cluster` can be connected to other ++cluster++s in a _federated_ fashion thus `cluster` means: + +single cluster:: +Multiple {es} instances typically distributed across machines, running within the same namespace. +multiple clusters:: +Multiple clusters, each with its own namespace, connected to each other in a federated setup (see <>). + +|=== + +As one can see while the mapping between the concepts are not exactly one to one and the semantics somewhat different, there are more things in common than differences. In fact, thanks to SQL declarative nature, many concepts can move across {es} transparently and the terminology of the two likely to be used interchangebly through-out the rest of the material. \ No newline at end of file diff --git a/x-pack/docs/en/sql/endpoints/cli.asciidoc b/x-pack/docs/en/sql/endpoints/cli.asciidoc index edbb1dcace4f1..e04fd96ab7198 100644 --- a/x-pack/docs/en/sql/endpoints/cli.asciidoc +++ b/x-pack/docs/en/sql/endpoints/cli.asciidoc @@ -1,4 +1,3 @@ -[role="xpack"] [[sql-cli]] == SQL CLI @@ -37,18 +36,3 @@ James S.A. Corey |Leviathan Wakes |561 |1306972800000 -------------------------------------------------- // TODO it'd be lovely to be able to assert that this is correct but // that is probably more work then it is worth right now. - -[[sql-cli-permissions]] -[NOTE] -=============================== -If you are using Security you need to add a few permissions to -users so they can run SQL. To run SQL using the CLI a user needs -`read`, `indices:admin/get`, and `cluster:monitor/main`. The -following example configures a role that can run SQL in the CLI -for the `test` and `bort` indices: - -["source","yaml",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-tests}/security/roles.yml[cli_jdbc] --------------------------------------------------- -=============================== diff --git a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc index 6959035bf09e4..84182f8b4a521 100644 --- a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc +++ b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc @@ -1,4 +1,3 @@ -[role="xpack"] [[sql-jdbc]] == SQL JDBC @@ -36,11 +35,11 @@ from `artifacts.elastic.co/maven` by adding it to the repositories list: [float] === Setup -The driver main class is `org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver`. Note the driver -also implements the JDBC 4.0 +Service Provider+ mechanism meaning it is registerd automatically +The driver main class is `org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver`. +Note the driver implements the JDBC 4.0 +Service Provider+ mechanism meaning it is registerd automatically as long as its available in the classpath. 
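To make the service-provider registration above concrete, here is a minimal, hedged sketch (not part of the patch) of opening a connection without any explicit `Class.forName` call, assuming the driver jar is on the classpath. The host, port and credentials are placeholders; `user` and `password` are the property names used by the security section of this patch, and extra parameters follow the URL syntax described just below.

["source","java"]
----
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

// Minimal sketch: the JDBC 4.0 service-provider mechanism registers the driver
// automatically, so DriverManager can be used directly. Host and credentials
// are hypothetical placeholders; user/password are only needed with security enabled.
public class JdbcConnectExample {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("user", "elastic");
        props.setProperty("password", "changeme");
        // Parameters such as timezone or page.size can be appended to the URL
        // following the syntax shown below.
        try (Connection con = DriverManager.getConnection("jdbc:es://http://localhost:9200", props)) {
            System.out.println("connected: " + !con.isClosed());
        }
    }
}
----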
-Once registered, the driver expects the following syntax as an URL: +Once registered, the driver understands the following syntax as an URL: ["source","text",subs="attributes"] ---- @@ -120,12 +119,12 @@ Query timeout (in seconds). That is the maximum amount of time waiting for a que To put all of it together, the following URL: -["source","text",subs="attributes"] +["source","text"] ---- jdbc:es://http://server:3456/timezone=UTC&page.size=250 ---- -Opens up a {es-jdbc} connection to `server` on port `3456`, setting the JDBC timezone to `UTC` and its pagesize to `250` entries. +Opens up a {es-sql} connection to `server` on port `3456`, setting the JDBC connection timezone to `UTC` and its pagesize to `250` entries. === API usage @@ -175,20 +174,4 @@ connection. For example: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{jdbc-tests}/SimpleExampleTestCase.java[simple_example] --------------------------------------------------- - -[[sql-jdbc-permissions]] -[NOTE] -=============================== -If you are using Security you need to add a few permissions to -users so they can run SQL. To run SQL a user needs `read` and -`indices:admin/get`. Some parts of the API require -`cluster:monitor/main`. The following example configures a -role that can run SQL in JDBC querying the `test` and `bort` -indices: - -["source","yaml",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-tests}/security/roles.yml[cli_jdbc] --------------------------------------------------- -=============================== +-------------------------------------------------- \ No newline at end of file diff --git a/x-pack/docs/en/sql/endpoints/rest.asciidoc b/x-pack/docs/en/sql/endpoints/rest.asciidoc index d31b03d3e7736..fa5093f8de528 100644 --- a/x-pack/docs/en/sql/endpoints/rest.asciidoc +++ b/x-pack/docs/en/sql/endpoints/rest.asciidoc @@ -186,17 +186,3 @@ or fewer results though. `time_zone` is the time zone to use for date functions and date parsing. `time_zone` defaults to `utc` and can take any values documented http://www.joda.org/joda-time/apidocs/org/joda/time/DateTimeZone.html[here]. - -[[sql-rest-permissions]] -[NOTE] -=============================== -If you are using Security you need to add a few permissions to -users so they can run SQL. To run SQL a user needs `read` and -`indices:admin/get`. The following example configures a role -that can run SQL against the `test` and `bort` indices: - -["source","yaml",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-tests}/security/roles.yml[rest] --------------------------------------------------- -=============================== diff --git a/x-pack/docs/en/sql/endpoints/translate.asciidoc b/x-pack/docs/en/sql/endpoints/translate.asciidoc index 9c1d71af5d35e..be6a77a3caa44 100644 --- a/x-pack/docs/en/sql/endpoints/translate.asciidoc +++ b/x-pack/docs/en/sql/endpoints/translate.asciidoc @@ -1,4 +1,3 @@ -[role="xpack"] [[sql-translate]] == SQL Translate API @@ -57,18 +56,3 @@ the normal <> API. The request body accepts all of the <> that the <> accepts except `cursor`. - -[[sql-translate-permissions]] -[NOTE] -=============================== -If you are using Security you need to add a few permissions to -users so they can run translate SQL. To translate SQL a user -needs `read` and `indices:admin/get`. 
The following example -configures a role that can run SQL against the `test` and -`bort` indices: - -["source","yaml",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-tests}/security/roles.yml[rest] --------------------------------------------------- -=============================== diff --git a/x-pack/docs/en/sql/functions/index.asciidoc b/x-pack/docs/en/sql/functions/index.asciidoc index a4e7028cf39c3..dd68370dde3e7 100644 --- a/x-pack/docs/en/sql/functions/index.asciidoc +++ b/x-pack/docs/en/sql/functions/index.asciidoc @@ -348,6 +348,25 @@ include-tagged::{sql-specs}/datetime.csv-spec[minuteOfHour] include-tagged::{sql-specs}/datetime.csv-spec[secondOfMinute] -------------------------------------------------- +* Extract + +As an alternative, one can support `EXTRACT` to extract fields from datetimes. +You can run any <> +with `EXTRACT( FROM )`. So + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[extractDayOfYear] +-------------------------------------------------- + +is the equivalent to + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[dayOfYear] +-------------------------------------------------- + + [[sql-functions-aggregate]] === Aggregate Functions diff --git a/x-pack/docs/en/sql/index.asciidoc b/x-pack/docs/en/sql/index.asciidoc index 902ea8ada7e22..4c2130208927a 100644 --- a/x-pack/docs/en/sql/index.asciidoc +++ b/x-pack/docs/en/sql/index.asciidoc @@ -19,7 +19,11 @@ indices and return results in tabular format. <>:: Overview of {es-sql} and its features. <>:: - Start using SQL right away in {es} + Start using SQL right away in {es}. +<>:: + Language conventions across SQL and {es}. +<>:: + Securing {es-sql} and {es}. <>:: Accepts SQL in a JSON document, executes it, and returns the results. @@ -34,15 +38,17 @@ indices and return results in tabular format. <>:: List of functions and operators supported. <>:: - Overview of the {es-sql} language, such as data types, syntax and - reserved keywords. - + Overview of the {es-sql} language, such as supported data types, commands and + syntax. -- include::overview.asciidoc[] include::getting-started.asciidoc[] +include::concepts.asciidoc[] +include::security.asciidoc[] include::endpoints/index.asciidoc[] include::functions/index.asciidoc[] include::language/index.asciidoc[] +include::appendix/index.asciidoc[] :jdbc-tests!: diff --git a/x-pack/docs/en/sql/language/data-types.asciidoc b/x-pack/docs/en/sql/language/data-types.asciidoc index a01c2fda5c726..7e5f045aa6ce9 100644 --- a/x-pack/docs/en/sql/language/data-types.asciidoc +++ b/x-pack/docs/en/sql/language/data-types.asciidoc @@ -1,5 +1,5 @@ [[sql-data-types]] -=== Data Types +== Data Types Most of {es} <> are available in {es-sql}, as indicated below: @@ -42,7 +42,7 @@ uses the data type _particularities_ of the former over the latter as ultimately [[sql-multi-field]] [float] -==== SQL and multi-fields +=== SQL and multi-fields A core concept in {es} is that of an `analyzed` field, that is a full-text value that is interpreted in order to be effectively indexed. 
These fields are of type <> and are not used for sorting or aggregations as their actual value depends on the <> used hence why {es} also offers the <> type for storing the _exact_ diff --git a/x-pack/docs/en/sql/language/index.asciidoc b/x-pack/docs/en/sql/language/index.asciidoc index 24bf450f1e42e..fdf6f3e7950ca 100644 --- a/x-pack/docs/en/sql/language/index.asciidoc +++ b/x-pack/docs/en/sql/language/index.asciidoc @@ -1,9 +1,10 @@ [[sql-spec]] == SQL Language -This chapter describes the SQL syntax and data types supported in X-Pack. -As a general rule, the syntax tries to adhere as much as possible to ANSI SQL to make the transition seamless. +This chapter describes the SQL semantics supported in X-Pack namely: + +<>:: Data types +<>:: Commands include::data-types.asciidoc[] -include::syntax.asciidoc[] -include::reserved.asciidoc[] +include::syntax/index.asciidoc[] diff --git a/x-pack/docs/en/sql/language/syntax.asciidoc b/x-pack/docs/en/sql/language/syntax.asciidoc deleted file mode 100644 index 5b837c91db2b1..0000000000000 --- a/x-pack/docs/en/sql/language/syntax.asciidoc +++ /dev/null @@ -1,123 +0,0 @@ -[[sql-spec-syntax]] -=== SQL Statement Syntax - -// Big list of the entire syntax in SQL - -// Each entry might get its own file and code snippet - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/select.sql-spec[wildcardWithOrder] --------------------------------------------------- - - -[[sql-spec-syntax-order-by]] -==== `ORDER BY` - -Elasticsearch supports `ORDER BY` for consistent ordering. You add -any field in the index that has <> or -`SCORE()` to sort by `_score`. By default SQL sorts on what it -considers to be the most efficient way to get the results. - -So sorting by a field looks like: - -[source,js] --------------------------------------------------- -POST /_xpack/sql?format=txt -{ - "query": "SELECT * FROM library ORDER BY page_count DESC LIMIT 5" -} --------------------------------------------------- -// CONSOLE -// TEST[setup:library] - -which results in something like: - -[source,text] --------------------------------------------------- - author | name | page_count | release_date ------------------+--------------------+---------------+------------------------ -Peter F. Hamilton|Pandora's Star |768 |2004-03-02T00:00:00.000Z -Vernor Vinge |A Fire Upon the Deep|613 |1992-06-01T00:00:00.000Z -Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -Alastair Reynolds|Revelation Space |585 |2000-03-15T00:00:00.000Z -James S.A. Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z --------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/] -// TESTRESPONSE[_cat] - -[[sql-spec-syntax-order-by-score]] -For sorting by score to be meaningful you need to include a full -text query in the `WHERE` clause. If you include multiple full -text queries in the `WHERE` clause then their scores will be -combined using the same rules as Elasticsearch's -<>. 
Here is a simple example: - -[source,js] --------------------------------------------------- -POST /_xpack/sql?format=txt -{ - "query": "SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY SCORE() DESC" -} --------------------------------------------------- -// CONSOLE -// TEST[setup:library] - -Which results in something like: - -[source,text] --------------------------------------------------- - SCORE() | author | name | page_count | release_date ----------------+---------------+-------------------+---------------+------------------------ -2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z -1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z -1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z --------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] -// TESTRESPONSE[_cat] - -Note that you can return `SCORE()` by adding it to the where clause. This -is possible even if you are not sorting by `SCORE()`: - -[source,js] --------------------------------------------------- -POST /_xpack/sql?format=txt -{ - "query": "SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY page_count DESC" -} --------------------------------------------------- -// CONSOLE -// TEST[setup:library] - -[source,text] --------------------------------------------------- - SCORE() | author | name | page_count | release_date ----------------+---------------+-------------------+---------------+------------------------ -2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z -1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z -1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z --------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] -// TESTRESPONSE[_cat] - - -[[sql-spec-syntax-extract]] -==== `EXTRACT` - -Elasticsearch supports `EXTRACT` to extract fields from datetimes. -You can run any <> -with `EXTRACT( FROM )`. So - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[extractDayOfYear] --------------------------------------------------- - -is the equivalent to - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[dayOfYear] --------------------------------------------------- diff --git a/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc b/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc new file mode 100644 index 0000000000000..114def470b181 --- /dev/null +++ b/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc @@ -0,0 +1,20 @@ +[[sql-syntax-describe-table]] +=== DESCRIBE TABLE + +.Synopsis +[source, sql] +---- +DESCRIBE table +---- + +or + +[source, sql] +---- +DESC table +---- + + +.Description + +`DESC` and `DESCRIBE` are aliases to <>. 
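For illustration (not part of the patch), a small hedged sketch of running the command through JDBC; the `library` index mirrors the sample data used by the other examples in this patch, and the connection URL is a placeholder.

["source","java"]
----
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Sketch: DESC/DESCRIBE return one row per column of the target table.
// The "library" index and the URL are placeholders.
public class DescribeTableExample {
    public static void main(String[] args) throws Exception {
        try (Connection con = DriverManager.getConnection("jdbc:es://http://localhost:9200");
             Statement st = con.createStatement();
             ResultSet rs = st.executeQuery("DESCRIBE library")) {
            while (rs.next()) {
                // Positional access, since the exact column labels follow the SHOW COLUMNS output.
                System.out.println(rs.getString(1) + " -> " + rs.getString(2));
            }
        }
    }
}
----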
diff --git a/x-pack/docs/en/sql/language/syntax/index.asciidoc b/x-pack/docs/en/sql/language/syntax/index.asciidoc new file mode 100644 index 0000000000000..e0e970edae14b --- /dev/null +++ b/x-pack/docs/en/sql/language/syntax/index.asciidoc @@ -0,0 +1,16 @@ +[[sql-commands]] +== SQL Commands + +This section contains the list of SQL commands supported by {es-sql} along with their syntax: + +<>:: Describe a table. +<>:: Retrieve rows from zero or more tables. +<>:: List columns in table. +<>:: List supported functions. +<>:: List tables available. + +include::describe-table.asciidoc[] +include::select.asciidoc[] +include::show-columns.asciidoc[] +include::show-functions.asciidoc[] +include::show-tables.asciidoc[] diff --git a/x-pack/docs/en/sql/language/syntax/select.asciidoc b/x-pack/docs/en/sql/language/syntax/select.asciidoc new file mode 100644 index 0000000000000..f39cbc0c2f8ca --- /dev/null +++ b/x-pack/docs/en/sql/language/syntax/select.asciidoc @@ -0,0 +1,284 @@ +[[sql-syntax-select]] +=== SELECT + +.Synopsis +[source, sql] +---- +SELECT select_expr [, ...] +[ FROM table_name ] +[ WHERE condition ] +[ GROUP BY grouping_element [, ...] ] +[ HAVING condition] +[ ORDER BY expression [ ASC | DESC ] [, ...] ] +[ LIMIT [ count ] ] +---- + +.Description + +Retrieves rows from zero or more tables. + +The general execution of `SELECT` is as follows: + +. All elements in the `FROM` list are computed (each element can be base or alias table). Currently `FROM` supports exactly one table. Do note however that the table name can be a pattern (see <> below). +. If the `WHERE` clause is specified, all rows that do not satisfy the condition are eliminated from the output. (See <> below.) +. If the `GROUP BY` clause is specified, or if there are aggregate function calls, the output is combined into groups of rows that match on one or more values, and the results of aggregate functions are computed. If the `HAVING` clause is present, it eliminates groups that do not satisfy the given condition. (See <> and <> below.) +. The actual output rows are computed using the `SELECT` output expressions for each selected row or row group. +. If the `ORDER BY` clause is specified, the returned rows are sorted in the specified order. If `ORDER BY` is not given, the rows are returned in whatever order the system finds fastest to produce. (See <> below.) +. If the `LIMIT` is specified, the `SELECT` statement only returns a subset of the result rows. (See <> below.) + + +[[sql-syntax-select-list]] +==== `SELECT` List + +`SELECT` list, namely the expressions between `SELECT` and `FROM`, represent the output rows of the `SELECT` statement. + +As with a table, every output column of a `SELECT` has a name which can be either specified per column through the `AS` keyword : + +[source,sql] +---- +SELECT column AS c +---- + +assigned by {es-sql} if no name is given: + +[source,sql] +---- +SELECT 1 + 1 +---- + +or if it's a simple column reference, use its name as the column name: + +[source,sql] +---- +SELECT col FROM table +---- + +[[sql-syntax-select-wildcard]] +==== Wildcard + +To select all the columns in the source, one can use `*`: + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/select.sql-spec[wildcardWithOrder] +-------------------------------------------------- + +which essentially returns all columsn found. 
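To make the SELECT-list naming rules above concrete, a hedged JDBC sketch (not part of the patch) that inspects the resulting column labels: an explicit `AS` alias, an expression, and a plain column reference. The `library` index, its fields and the URL are placeholders borrowed from the surrounding examples.

["source","java"]
----
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;

// Sketch: the output column names follow the rules described above.
// Index, fields and URL are placeholders.
public class SelectListExample {
    public static void main(String[] args) throws Exception {
        try (Connection con = DriverManager.getConnection("jdbc:es://http://localhost:9200");
             Statement st = con.createStatement();
             ResultSet rs = st.executeQuery(
                     "SELECT author AS writer, page_count + 10 AS padded, name FROM library LIMIT 3")) {
            ResultSetMetaData meta = rs.getMetaData();
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                // Expected labels, per the rules above: writer, padded, name.
                System.out.println(meta.getColumnLabel(i));
            }
        }
    }
}
----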
+
+[[sql-syntax-from]]
+[float]
+==== FROM Clause
+
+The `FROM` clause specifies one table for the `SELECT` and has the following syntax:
+
+[source, sql]
+----
+FROM table_name [ [ AS ] alias ]
+----
+
+where:
+
+`table_name`::
+
+Represents the name (optionally qualified) of an existing table, either a concrete or base one (actual index) or alias.
+If the table name contains special SQL characters (such as `.`,`-`,etc...) use double quotes to escape them:
+[source, sql]
+----
+SELECT ... FROM "some-table"
+----
+
+The name can be a <> pointing to multiple indices (likely requiring quoting as mentioned above) with the restriction that *all* resolved concrete tables have **exact mapping**.
+
+`alias`::
+A substitute name for the `FROM` item containing the alias. An alias is used for brevity or to eliminate ambiguity. When an alias is provided, it completely hides the actual name of the table and must be used in its place.
+
+[[sql-syntax-where]]
+[float]
+==== WHERE Clause
+
+The optional `WHERE` clause is used to filter rows from the query and has the following syntax:
+
+[source, sql]
+----
+WHERE condition
+----
+
+where:
+
+`condition`::
+
+Represents an expression that evaluates to a `boolean`. Only the rows that match the condition (to `true`) are returned.
+
+[[sql-syntax-group-by]]
+[float]
+==== GROUP BY
+
+The `GROUP BY` clause is used to divide the results into groups of rows on matching values from the designated columns. It has the following syntax:
+
+[source, sql]
+----
+GROUP BY grouping_element [, ...]
+----
+
+where:
+
+`grouping_element`::
+
+Represents an expression on which rows are grouped. It can be a column name, name or ordinal number of a column or an arbitrary expression of column values.
+
+When a `GROUP BY` clause is used in a `SELECT`, _all_ output expressions must be either aggregate functions or expressions used for grouping or derivatives of them (otherwise there would be more than one possible value to return for each ungrouped column).
+
+[[sql-syntax-having]]
+[float]
+==== HAVING
+
+The `HAVING` clause can be used _only_ along with aggregate functions (and thus `GROUP BY`) to filter which groups are kept and has the following syntax:
+
+[source, sql]
+----
+HAVING condition
+----
+
+where:
+
+`condition`::
+
+Represents an expression that evaluates to a `boolean`. Only groups that match the condition (to `true`) are returned.
+
+Both `WHERE` and `HAVING` are used for filtering, however there are several differences between them:
+
+. `WHERE` works on individual *rows*, `HAVING` works on the *groups* created by ``GROUP BY``
+. `WHERE` is evaluated *before* grouping, `HAVING` is evaluated *after* grouping
+
+Note that it is possible to have a `HAVING` clause without a ``GROUP BY``. In this case, an __implicit grouping__ is applied, meaning all selected rows are considered to form a single group and `HAVING` can be applied on any of the aggregate functions specified on this group. +
+As such a query emits only a single row (as there is only a single group), the `HAVING` condition returns either one row (the group) or zero if the condition fails.
+
+[[sql-syntax-order-by]]
+[float]
+==== ORDER BY
+
+The `ORDER BY` clause is used to sort the results of `SELECT` by one or more expressions:
+
+[source, sql]
+----
+ORDER BY expression [ ASC | DESC ] [, ...]
+----
+
+where:
+
+`expression`::
+
+Represents an input column, an output column or an ordinal number of the position (starting from one) of an output column. Additionally, ordering can be done based on the results _score_ +
+The direction, if not specified, is by default `ASC` (ascending). +
+Regardless of the ordering specified, null values are ordered last (at the end).
+
+IMPORTANT: When used along-side `GROUP BY`, the `ORDER BY` expression can point _only_ to the columns used for grouping.
+
+For example, the following query sorts by an arbitrary input field (`page_count`):
+
+[source,js]
+--------------------------------------------------
+POST /_xpack/sql?format=txt
+{
+ "query": "SELECT * FROM library ORDER BY page_count DESC LIMIT 5"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:library]
+
+which results in something like:
+
+[source,text]
+--------------------------------------------------
+ author | name | page_count | release_date
+-----------------+--------------------+---------------+------------------------
+Peter F. Hamilton|Pandora's Star |768 |2004-03-02T00:00:00.000Z
+Vernor Vinge |A Fire Upon the Deep|613 |1992-06-01T00:00:00.000Z
+Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z
+Alastair Reynolds|Revelation Space |585 |2000-03-15T00:00:00.000Z
+James S.A. Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z
+--------------------------------------------------
+// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/]
+// TESTRESPONSE[_cat]
+
+[[sql-syntax-order-by-score]]
+==== Order By Score
+
+When doing full-text queries in the `WHERE` clause, results can be returned based on their
+{defguide}/relevance-intro.html[score] or _relevance_ to the given query.
+
+NOTE: When doing multiple text queries in the `WHERE` clause, their scores will be
+combined using the same rules as {es}'s
+<>.
+
+To sort based on the `score`, use the special function `SCORE()`:
+
+[source,js]
+--------------------------------------------------
+POST /_xpack/sql?format=txt
+{
+ "query": "SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY SCORE() DESC"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:library]
+
+Which results in something like:
+
+[source,text]
+--------------------------------------------------
+ SCORE() | author | name | page_count | release_date
+---------------+---------------+-------------------+---------------+------------------------
+2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z
+1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z
+1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z
+1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z
+--------------------------------------------------
+// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/]
+// TESTRESPONSE[_cat]
+
+Note that you can return `SCORE()` by adding it to the where clause. This
+is possible even if you are not sorting by `SCORE()`:
+
+[source,js]
+--------------------------------------------------
+POST /_xpack/sql?format=txt
+{
+ "query": "SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY page_count DESC"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:library]
+
+[source,text]
+--------------------------------------------------
+ SCORE() | author | name | page_count | release_date
+---------------+---------------+-------------------+---------------+------------------------
+2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z
+1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z
+1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z
+1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z
+--------------------------------------------------
+// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/]
+// TESTRESPONSE[_cat]
+
+NOTE:
+Trying to return `score` from non full-text queries will return the same value for all results, as
+all are equally relevant.
+
+[[sql-syntax-limit]]
+[float]
+==== LIMIT
+
+The `LIMIT` clause restricts (limits) the number of rows returned using the format:
+
+[source, sql]
+----
+LIMIT ( count | ALL )
+----
+
+where
+
+count:: is a positive integer or zero indicating the maximum *possible* number of results being returned (as there might be fewer matches than the limit). If `0` is specified, no results are returned.
+
+ALL:: indicates there is no limit and thus all results are being returned.
diff --git a/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc b/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc
new file mode 100644
index 0000000000000..2e7c8f7bfca69
--- /dev/null
+++ b/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc
@@ -0,0 +1,12 @@
+[[sql-syntax-show-columns]]
+=== SHOW COLUMNS
+
+.Synopsis
+[source, sql]
+----
+SHOW COLUMNS [ FROM | IN ] ? table
+----
+
+.Description
+
+List the columns in the table and their data type (and other attributes).
diff --git a/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc b/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc
new file mode 100644
index 0000000000000..197b9e8cb3b79
--- /dev/null
+++ b/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc
@@ -0,0 +1,14 @@
+[[sql-syntax-show-functions]]
+=== SHOW FUNCTIONS
+
+.Synopsis
+[source, sql]
+----
+SHOW FUNCTIONS [ LIKE? pattern<1>? ]?
+----
+
+<1> SQL match pattern
+
+.Description
+
+List all the SQL functions and their type. The `LIKE` clause can be used to restrict the list of names to the given pattern.
diff --git a/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc b/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc
new file mode 100644
index 0000000000000..9266b6d58058b
--- /dev/null
+++ b/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc
@@ -0,0 +1,14 @@
+[[sql-syntax-show-tables]]
+=== SHOW TABLES
+
+.Synopsis
+[source, sql]
+----
+SHOW TABLES [ LIKE? pattern<1>? ]?
+----
+
+<1> SQL match pattern
+
+.Description
+
+List the tables available to the current user and their type. The `LIKE` clause can be used to restrict the list of names to the given pattern.
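Before moving on from the SQL command files above, here is a hedged sketch (not part of the patch) that pulls the `SELECT` clauses together: a filtered, grouped, sorted and limited query run over JDBC. The `library` index, its fields and the URL are placeholders, and whether any particular combination of clauses is accepted is ultimately up to the engine.

["source","java"]
----
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Sketch combining WHERE, GROUP BY, HAVING, ORDER BY and LIMIT in the order
// described above. Index, fields and URL are placeholders.
public class GroupedSelectExample {
    public static void main(String[] args) throws Exception {
        String sql = "SELECT author, COUNT(*) AS titles "
                + "FROM library "
                + "WHERE page_count > 100 "
                + "GROUP BY author "
                + "HAVING COUNT(*) > 1 "
                + "ORDER BY author ASC "
                + "LIMIT 10";
        try (Connection con = DriverManager.getConnection("jdbc:es://http://localhost:9200");
             Statement st = con.createStatement();
             ResultSet rs = st.executeQuery(sql)) {
            while (rs.next()) {
                System.out.println(rs.getString("titles") + " titles by " + rs.getString("author"));
            }
        }
    }
}
----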
diff --git a/x-pack/docs/en/sql/security.asciidoc b/x-pack/docs/en/sql/security.asciidoc
new file mode 100644
index 0000000000000..bba73a2a4de6d
--- /dev/null
+++ b/x-pack/docs/en/sql/security.asciidoc
@@ -0,0 +1,37 @@
+[[sql-security]]
+== Security
+
+{es-sql} integrates with security, if it is enabled on your cluster.
+In such a scenario, {es-sql} supports both security at the transport layer (by encrypting the communication between the consumer and the server) and authentication (for the access layer).
+
+[float]
+==== SSL/TLS configuration
+
+In case of an encrypted transport, the SSL/TLS support needs to be enabled in {es-sql} to properly establish communication with {es}. This is done by setting the `ssl` property to `true` or by using the `https` prefix in the URL.
+
+Depending on your SSL configuration (whether the certificates are signed by a CA or not, whether they are global at JVM level or just local to one application), you might need to set up the `keystore` and/or `truststore`, that is, where the _credentials_ are stored (`keystore` - which typically stores private keys and certificates) and how to _verify_ them (`truststore` - which typically stores certificates from third parties also known as CA - certificate authorities).
+
+Typically (and again, do note that your environment might differ significantly), if the SSL setup for {es-sql} is not already done at the JVM level, one needs to set up the keystore if the {es-sql} security requires client authentication (PKI - Public Key Infrastructure), and set up the `truststore` if SSL is enabled.
+
+[float]
+==== Authentication
+
+The authentication support in {es-sql} is of two types:
+
+Username/Password:: Set these through `user` and `password` properties.
+PKI/X.509:: Use X.509 certificates to authenticate {es-sql} to {es}. For this, one would need to set up the `keystore` containing the private key and certificate to the appropriate user (configured in {es}) and the `truststore` with the CA certificate used to sign the SSL/TLS certificates in the {es} cluster. That is, one should set up the key to authenticate {es-sql} and also to verify that it is the right one. To do so, one should set the `ssl.keystore.location` and `ssl.truststore.location` properties to indicate the `keystore` and `truststore` to use. It is recommended to have these secured through a password in which case `ssl.keystore.pass` and `ssl.truststore.pass` properties are required.
+
+[float]
+[[sql-security-permissions]]
+==== Permissions (server-side)
+Lastly, on the server one needs to add a few permissions to
+users so they can run SQL. To run SQL a user needs `read` and
+`indices:admin/get` permissions at minimum while some parts of
+the API require `cluster:monitor/main`.
+ +The following example configures a role that can run SQL in JDBC querying the `test` and `bort` +indices: + +["source","yaml",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-tests}/security/roles.yml[cli_jdbc] +-------------------------------------------------- + From bd06563e78688fa3a94a8b17285a0a6c7565fbb9 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 21 Jun 2018 10:08:50 -0700 Subject: [PATCH 08/34] [DOCS] Creates field and document level security overview (#30937) --- ...field-and-document-access-control.asciidoc | 404 +----------------- .../authorization/role-templates.asciidoc | 71 +++ .../authorization/set-security-user.asciidoc | 61 +++ 3 files changed, 140 insertions(+), 396 deletions(-) create mode 100644 x-pack/docs/en/security/authorization/role-templates.asciidoc create mode 100644 x-pack/docs/en/security/authorization/set-security-user.asciidoc diff --git a/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc b/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc index a1aa44895c6a6..119a090232c2f 100644 --- a/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc +++ b/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc @@ -3,9 +3,11 @@ === Setting up field and document level security You can control access to data within an index by adding field and document level -security permissions to a role. Field level security permissions restrict access -to particular fields within a document. Document level security permissions -restrict access to particular documents within an index. +security permissions to a role. +<> restrict access to +particular fields within a document. +<> restrict access +to particular documents within an index. NOTE: Document and field level security is currently meant to operate with read-only privileged accounts. Users with document and field level @@ -23,399 +25,6 @@ grant wider access than intended. Each user has a single set of field level and document level permissions per index. See <>. ===================================================================== -[[field-level-security]] -==== Field level security - -To enable field level security, specify the fields that each role can access -as part of the indices permissions in a role definition. Field level security is -thus bound to a well-defined set of indices (and potentially a set of -<>). - -The following role definition grants read access only to the `category`, -`@timestamp`, and `message` fields in all the `events-*` indices. - -[source,js] --------------------------------------------------- -{ - "indices": [ - { - "names": [ "events-*" ], - "privileges": [ "read" ], - "field_security" : { - "grant" : [ "category", "@timestamp", "message" ] - } - } - ] -} --------------------------------------------------- - -Access to the following meta fields is always allowed: `_id`, -`_type`, `_parent`, `_routing`, `_timestamp`, `_ttl`, `_size` and `_index`. If -you specify an empty list of fields, only these meta fields are accessible. - -NOTE: Omitting the fields entry entirely disables field-level security. - -You can also specify field expressions. 
For example, the following -example grants read access to all fields that start with an `event_` prefix: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant" : [ "event_*" ] - } - } - ] -} --------------------------------------------------- - -Use the dot notations to refer to nested fields in more complex documents. For -example, assuming the following document: - -[source,js] --------------------------------------------------- -{ - "customer": { - "handle": "Jim", - "email": "jim@mycompany.com", - "phone": "555-555-5555" - } -} --------------------------------------------------- - -The following role definition enables only read access to the customer `handle` -field: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant" : [ "customer.handle" ] - } - } - ] -} --------------------------------------------------- - -This is where wildcard support shines. For example, use `customer.*` to enable -only read access to the `customer` data: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant" : [ "customer.*" ] - } - } - ] -} --------------------------------------------------- - -You can deny permission to access fields with the following syntax: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant" : [ "*"], - "except": [ "customer.handle" ] - } - } - ] -} --------------------------------------------------- - - -The following rules apply: - -* The absence of `field_security` in a role is equivalent to * access. -* If permission has been granted explicitly to some fields, you can specify -denied fields. The denied fields must be a subset of the fields to which -permissions were granted. -* Defining denied and granted fields implies access to all granted fields except -those which match the pattern in the denied fields. - -For example: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "except": [ "customer.handle" ], - "grant" : [ "customer.*" ] - } - } - ] -} --------------------------------------------------- - -In the above example, users can read all fields with the prefix "customer." -except for "customer.handle". - -An empty array for `grant` (for example, `"grant" : []`) means that access has -not been granted to any fields. - -===== Field Level Security and Roles - -When a user has several roles that specify field level permissions, the -resulting field level permissions per index are the union of the individual role -permissions. For example, if these two roles are merged: - -[source,js] --------------------------------------------------- -{ - // role 1 - ... - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant": [ "a.*" ], - "except" : [ "a.b*" ] - } - } - ] -} - -{ - // role 2 - ... 
- "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant": [ "a.b*" ], - "except" : [ "a.b.c*" ] - } - } - ] -} --------------------------------------------------- - -The resulting permission is equal to: - -[source,js] --------------------------------------------------- -{ - // role 1 + role 2 - ... - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "field_security" : { - "grant": [ "a.*" ], - "except" : [ "a.b.c*" ] - } - } - ] -} --------------------------------------------------- - - -[[document-level-security]] -==== Document level security - -Document level security restricts the documents that users have read access to. -To enable document level security, specify a query that matches all the -accessible documents as part of the indices permissions within a role definition. -Document level security is thus bound to a well defined set of indices. - -Enabling document level security restricts which documents can be accessed from -any document-based read API. To enable document level security, you use a query -to specify the documents that each role can access in the `roles.yml` file. -You specify the document query with the `query` option. The document query is -associated with a particular index or index pattern and operates in conjunction -with the privileges specified for the indices. - -The following role definition grants read access only to documents that -belong to the `click` category within all the `events-*` indices: - -[source,js] --------------------------------------------------- -{ - "indices": [ - { - "names": [ "events-*" ], - "privileges": [ "read" ], - "query": "{\"match\": {\"category\": \"click\"}}" - } - ] -} --------------------------------------------------- - -NOTE: Omitting the `query` entry entirely disables document level security for - the respective indices permission entry. - -The specified `query` expects the same format as if it was defined in the -search request and supports the full {es} {ref}/query-dsl.html[Query DSL]. - -For example, the following role grants read access only to the documents whose -`department_id` equals `12`: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "*" ], - "privileges" : [ "read" ], - "query" : { - "term" : { "department_id" : 12 } - } - } - ] -} --------------------------------------------------- - -NOTE: `query` also accepts queries written as string values. - -[[templating-role-query]] -===== Templating a role query - -You can use Mustache templates in a role query to insert the username of the -current authenticated user into the role. Like other places in {es} that support -templating or scripting, you can specify inline, stored, or file-based templates -and define custom parameters. You access the details for the current -authenticated user through the `_user` parameter. - -For example, the following role query uses a template to insert the username -of the current authenticated user: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "my_index" ], - "privileges" : [ "read" ], - "query" : { - "template" : { - "source" : { - "term" : { "acl.username" : "{{_user.username}}" } - } - } - } - } - ] -} --------------------------------------------------- - -You can access the following information through the `_user` variable: - -[options="header"] -|====== -| Property | Description -| `_user.username` | The username of the current authenticated user. 
-| `_user.full_name` | If specified, the full name of the current authenticated user. -| `_user.email` | If specified, the email of the current authenticated user. -| `_user.roles` | If associated, a list of the role names of the current authenticated user. -| `_user.metadata` | If specified, a hash holding custom metadata of the current authenticated user. -|====== - -You can also access custom user metadata. For example, if you maintain a -`group_id` in your user metadata, you can apply document level security -based on the `group.id` field in your documents: - -[source,js] --------------------------------------------------- -{ - "indices" : [ - { - "names" : [ "my_index" ], - "privileges" : [ "read" ], - "query" : { - "template" : { - "source" : { - "term" : { "group.id" : "{{_user.metadata.group_id}}" } - } - } - } - } - ] -} --------------------------------------------------- - -[[set-security-user-processor]] -===== Set security user ingest processor - -If an index is shared by many small users it makes sense to put all these users -into the same index. Having a dedicated index or shard per user is wasteful. -To guarantee that a user reads only their own documents, it makes sense to set up -document level security. In this scenario, each document must have the username -or role name associated with it, so that this information can be used by the -role query for document level security. This is a situation where the -`set_security_user` ingest processor can help. - -NOTE: Document level security doesn't apply to write APIs. You must use unique -ids for each user that uses the same index, otherwise they might overwrite other -users' documents. The ingest processor just adds properties for the current -authenticated user to the documents that are being indexed. - -The `set_security_user` processor attaches user-related details (such as -`username`, `roles`, `email`, `full_name` and `metadata` ) from the current -authenticated user to the current document by pre-processing the ingest. When -you index data with an ingest pipeline, user details are automatically attached -to the document. For example: - -[source,js] --------------------------------------------------- -PUT shared-logs/log/1?pipeline=my_pipeline_id -{ - ... -} --------------------------------------------------- - -Read the {ref}/ingest.html[ingest docs] for more information -about setting up a pipeline and other processors. - -[[set-security-user-options]] -.Set Security User Options -[options="header"] -|====== -| Name | Required | Default | Description -| `field` | yes | - | The field to store the user information into. -| `properties` | no | [`username`, `roles`, `email`, `full_name`, `metadata`] | Controls what user related properties are added to the `field`. -|====== - -The following example adds all user details for the current authenticated user -to the `user` field for all documents that are processed by this pipeline: - -[source,js] --------------------------------------------------- -{ - "processors" : [ - { - "set_security_user": { - "field": "user" - } - } - ] -} --------------------------------------------------- - [[multiple-roles-dls-fls]] ==== Multiple roles with document and field level security @@ -447,3 +56,6 @@ fields. If you need to restrict access to both documents and fields, consider splitting documents by index instead. 
+ +include::role-templates.asciidoc[] +include::set-security-user.asciidoc[] diff --git a/x-pack/docs/en/security/authorization/role-templates.asciidoc b/x-pack/docs/en/security/authorization/role-templates.asciidoc new file mode 100644 index 0000000000000..1bad73a5d1e94 --- /dev/null +++ b/x-pack/docs/en/security/authorization/role-templates.asciidoc @@ -0,0 +1,71 @@ +[[templating-role-query]] +==== Templating a role query + +When you create a role, you can specify a query that defines the +<>. You can +optionally use Mustache templates in the role query to insert the username of the +current authenticated user into the role. Like other places in {es} that support +templating or scripting, you can specify inline, stored, or file-based templates +and define custom parameters. You access the details for the current +authenticated user through the `_user` parameter. + +For example, the following role query uses a template to insert the username +of the current authenticated user: + +[source,js] +-------------------------------------------------- +POST /_xpack/security/role/example1 +{ + "indices" : [ + { + "names" : [ "my_index" ], + "privileges" : [ "read" ], + "query" : { + "template" : { + "source" : { + "term" : { "acl.username" : "{{_user.username}}" } + } + } + } + } + ] +} +-------------------------------------------------- +// CONSOLE + +You can access the following information through the `_user` variable: + +[options="header"] +|====== +| Property | Description +| `_user.username` | The username of the current authenticated user. +| `_user.full_name` | If specified, the full name of the current authenticated user. +| `_user.email` | If specified, the email of the current authenticated user. +| `_user.roles` | If associated, a list of the role names of the current authenticated user. +| `_user.metadata` | If specified, a hash holding custom metadata of the current authenticated user. +|====== + +You can also access custom user metadata. For example, if you maintain a +`group_id` in your user metadata, you can apply document level security +based on the `group.id` field in your documents: + +[source,js] +-------------------------------------------------- +POST /_xpack/security/role/example2 +{ + "indices" : [ + { + "names" : [ "my_index" ], + "privileges" : [ "read" ], + "query" : { + "template" : { + "source" : { + "term" : { "group.id" : "{{_user.metadata.group_id}}" } + } + } + } + } + ] +} +-------------------------------------------------- +// CONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/security/authorization/set-security-user.asciidoc b/x-pack/docs/en/security/authorization/set-security-user.asciidoc new file mode 100644 index 0000000000000..92b9ae275aec8 --- /dev/null +++ b/x-pack/docs/en/security/authorization/set-security-user.asciidoc @@ -0,0 +1,61 @@ +[[set-security-user-processor]] +==== Pre-processing documents to add security details + +// If an index is shared by many small users it makes sense to put all these users +// into the same index. Having a dedicated index or shard per user is wasteful. +// TBD: It's unclear why we're putting users in an index here. + +To guarantee that a user reads only their own documents, it makes sense to set up +document level security. In this scenario, each document must have the username +or role name associated with it, so that this information can be used by the +role query for document level security. This is a situation where the +`set_security_user` ingest processor can help. 
+ +NOTE: Document level security doesn't apply to write APIs. You must use unique +ids for each user that uses the same index, otherwise they might overwrite other +users' documents. The ingest processor just adds properties for the current +authenticated user to the documents that are being indexed. + +The `set_security_user` processor attaches user-related details (such as +`username`, `roles`, `email`, `full_name` and `metadata` ) from the current +authenticated user to the current document by pre-processing the ingest. When +you index data with an ingest pipeline, user details are automatically attached +to the document. For example: + +[source,js] +-------------------------------------------------- +PUT shared-logs/log/1?pipeline=my_pipeline_id +{ + ... +} +-------------------------------------------------- +// NOTCONSOLE + +For more information about setting up a pipeline and other processors, see +{ref}/ingest.html[ingest node]. + +[[set-security-user-options]] +.Set Security User Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field to store the user information into. +| `properties` | no | [`username`, `roles`, `email`, `full_name`, `metadata`] | Controls what user related properties are added to the `field`. +|====== + +The following example adds all user details for the current authenticated user +to the `user` field for all documents that are processed by this pipeline: + +[source,js] +-------------------------------------------------- +{ + "processors" : [ + { + "set_security_user": { + "field": "user" + } + } + ] +} +-------------------------------------------------- +// NOTCONSOLE \ No newline at end of file From 6f3e97f2b7536e9d092ba80cb71d4ff3f7557734 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 21 Jun 2018 13:24:39 -0400 Subject: [PATCH 09/34] Test: Skip assertion on windows Windows doesn't provide consistent exception messages when it can't connect so skip the exception message assertion on windows. Closes #31457 --- .../client/RestClientMultipleHostsIntegTests.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java index d09741ea25b6c..7f5915fe3529d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java @@ -42,9 +42,7 @@ import static org.elasticsearch.client.RestClientTestUtil.getAllStatusCodes; import static org.elasticsearch.client.RestClientTestUtil.randomErrorNoRetryStatusCode; import static org.elasticsearch.client.RestClientTestUtil.randomOkStatusCode; -import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -216,8 +214,10 @@ public void testNodeSelector() throws IOException { restClient.performRequest(request); fail("expected to fail to connect"); } catch (ConnectException e) { - // This is different in windows and linux but this matches both. - assertThat(e.getMessage(), startsWith("Connection refused")); + // Windows isn't consistent here. Sometimes the message is even null! 
+            if (false == System.getProperty("os.name").startsWith("Windows")) {
+                assertEquals("Connection refused", e.getMessage());
+            }
             }
         } else {
             Response response = restClient.performRequest(request);

From 048a92bf39beecef1fa33e820699d79bdcd9133b Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Thu, 21 Jun 2018 13:50:46 -0400
Subject: [PATCH 10/34] Rename createNewTranslog to fileBasedRecovery (#31508)

We renamed `createNewTranslog` to `fileBasedRecovery` in the RecoveryTarget
but did not do this for RecoverySourceHandler. This commit makes sure that we
use a consistent parameter name in both recovery source and target.
---
 .../elasticsearch/indices/recovery/RecoverySourceHandler.java | 4 ++--
 .../indices/recovery/RecoverySourceHandlerTests.java | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
index 72a6fcb6ba329..45500349865f7 100644
--- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
+++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
@@ -449,13 +449,13 @@ public void phase1(final IndexCommit snapshot, final Supplier translogO
         }
     }
-    void prepareTargetForTranslog(final boolean createNewTranslog, final int totalTranslogOps) throws IOException {
+    void prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTranslogOps) throws IOException {
         StopWatch stopWatch = new StopWatch().start();
         logger.trace("recovery [phase1]: prepare remote engine for translog");
         final long startEngineStart = stopWatch.totalTime().millis();
         // Send a request preparing the new shard's translog to receive operations. This ensures the shard engine is started and disables
         // garbage collection (not the JVM's GC!) of tombstone deletes.
- cancellableThreads.executeIO(() -> recoveryTarget.prepareForTranslogOperations(createNewTranslog, totalTranslogOps)); + cancellableThreads.executeIO(() -> recoveryTarget.prepareForTranslogOperations(fileBasedRecovery, totalTranslogOps)); stopWatch.stop(); response.startTime = stopWatch.totalTime().millis() - startEngineStart; diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 5ade55ef5340c..6be6d7e80bccb 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -423,7 +423,7 @@ public void phase1(final IndexCommit snapshot, final Supplier translogO } @Override - void prepareTargetForTranslog(final boolean createNewTranslog, final int totalTranslogOps) throws IOException { + void prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTranslogOps) throws IOException { prepareTargetForTranslogCalled.set(true); } From 60204af0cbcf43c83e86b4ddc8426111139f4838 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 21 Jun 2018 11:13:19 -0700 Subject: [PATCH 11/34] [DOCS] Remove fixed file from build.gradle --- x-pack/docs/build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 9abca910c5dfc..6c0a4bfcac647 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -11,7 +11,6 @@ apply plugin: 'elasticsearch.docs-test' buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/watcher/put-watch.asciidoc', 'en/security/authentication/user-cache.asciidoc', - 'en/security/authorization/field-and-document-access-control.asciidoc', 'en/security/authorization/run-as-privilege.asciidoc', 'en/security/ccs-clients-integrations/http.asciidoc', 'en/security/authorization/custom-roles-provider.asciidoc', From 4f9332ee16bcc422144a866122eb8bc2df2c0040 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 21 Jun 2018 11:25:26 -0700 Subject: [PATCH 12/34] Core: Remove ThreadPool from base TransportAction (#31492) Most transport actions don't need the node ThreadPool. This commit removes the ThreadPool as a super constructor parameter for TransportAction. The actions that do need the thread pool then have a member added to keep it from their own constructor. 
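
To make the new constructor wiring concrete, here is a minimal sketch (not taken from this
patch) of a handled transport action that still needs the thread pool after the change: the
ThreadPool is no longer passed to the super constructor, so the action keeps its own field,
mirroring what the diff below does for actions such as TransportBulkAction and
TransportSearchAction. The ExampleRequest and ExampleResponse types and the action name are
hypothetical, and the generic parameters and doExecute overload are reconstructed from
context, since angle-bracket content is stripped in this patch text.

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.support.ActionFilters;
    import org.elasticsearch.action.support.HandledTransportAction;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.threadpool.ThreadPool;
    import org.elasticsearch.transport.TransportService;

    public class TransportExampleAction extends HandledTransportAction<ExampleRequest, ExampleResponse> {

        // The base class no longer holds a ThreadPool, so an action that needs one keeps its own reference.
        private final ThreadPool threadPool;

        public TransportExampleAction(Settings settings, ThreadPool threadPool, TransportService transportService,
                                      ActionFilters actionFilters) {
            // ThreadPool is gone from the super constructor parameters.
            super(settings, "cluster:admin/example", transportService, actionFilters, ExampleRequest::new);
            this.threadPool = threadPool;
        }

        @Override
        protected void doExecute(ExampleRequest request, ActionListener<ExampleResponse> listener) {
            // Forking work is still possible through the action's own ThreadPool field.
            threadPool.executor(ThreadPool.Names.GENERIC)
                .execute(() -> listener.onResponse(new ExampleResponse()));
        }
    }

This mirrors the member-field pattern the commit describes: only actions that genuinely fork
work carry the ThreadPool dependency, and the base TransportAction stays free of it.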
--- .../action/bulk/TransportNoopBulkAction.java | 8 +++---- .../search/TransportNoopSearchAction.java | 9 +++----- .../ingest/common/GrokProcessorGetAction.java | 6 ++--- .../TransportMultiSearchTemplateAction.java | 5 ++--- .../TransportSearchTemplateAction.java | 10 ++++----- .../painless/PainlessExecuteAction.java | 5 ++--- .../rankeval/TransportRankEvalAction.java | 5 ++--- .../reindex/TransportDeleteByQueryAction.java | 9 +++++--- .../index/reindex/TransportReindexAction.java | 5 +++-- .../reindex/TransportRethrottleAction.java | 4 ++-- .../reindex/TransportUpdateByQueryAction.java | 5 ++++- .../cancel/TransportCancelTasksAction.java | 4 ++-- .../tasks/get/TransportGetTaskAction.java | 4 +++- .../tasks/list/TransportListTasksAction.java | 4 ++-- .../remote/TransportRemoteInfoAction.java | 5 ++--- .../TransportClearIndicesCacheAction.java | 4 ++-- .../indices/flush/TransportFlushAction.java | 8 +++---- .../flush/TransportSyncedFlushAction.java | 5 ++--- .../forcemerge/TransportForceMergeAction.java | 4 ++-- .../get/TransportGetFieldMappingsAction.java | 5 ++--- .../recovery/TransportRecoveryAction.java | 4 ++-- .../refresh/TransportRefreshAction.java | 5 ++--- .../TransportIndicesSegmentsAction.java | 4 ++-- .../stats/TransportIndicesStatsAction.java | 4 ++-- .../get/TransportUpgradeStatusAction.java | 4 ++-- .../upgrade/post/TransportUpgradeAction.java | 4 ++-- .../query/TransportValidateQueryAction.java | 4 ++-- .../action/bulk/TransportBulkAction.java | 4 +++- .../action/bulk/TransportShardBulkAction.java | 2 ++ .../TransportFieldCapabilitiesAction.java | 4 +++- .../action/get/TransportMultiGetAction.java | 5 ++--- .../SimulatePipelineTransportAction.java | 2 +- .../action/main/TransportMainAction.java | 5 ++--- .../search/TransportClearScrollAction.java | 5 ++--- .../search/TransportMultiSearchAction.java | 7 ++++-- .../action/search/TransportSearchAction.java | 5 +++-- .../search/TransportSearchScrollAction.java | 5 ++--- .../support/HandledTransportAction.java | 22 +++++++++---------- .../action/support/TransportAction.java | 6 +---- .../broadcast/TransportBroadcastAction.java | 4 ++-- .../node/TransportBroadcastByNodeAction.java | 8 ++----- .../master/TransportMasterNodeAction.java | 9 ++++---- .../support/nodes/TransportNodesAction.java | 4 +++- .../TransportBroadcastReplicationAction.java | 5 ++--- .../TransportReplicationAction.java | 4 +++- ...ransportInstanceSingleOperationAction.java | 5 ++++- .../shard/TransportSingleShardAction.java | 4 +++- .../support/tasks/TransportTasksAction.java | 4 ++-- .../TransportMultiTermVectorsAction.java | 5 ++--- .../action/ActionModuleTests.java | 5 ++--- .../node/tasks/TaskManagerTestCase.java | 5 ++--- .../cluster/node/tasks/TestTaskPlugin.java | 9 +++----- .../node/tasks/TransportTasksActionTests.java | 8 +++---- .../action/main/MainActionTests.java | 4 +--- .../TransportActionFilterChainTests.java | 4 ++-- .../TransportBroadcastByNodeActionTests.java | 2 +- .../BroadcastReplicationTests.java | 10 ++++----- .../client/node/NodeClientHeadersTests.java | 2 +- .../persistent/TestPersistentTasksPlugin.java | 8 +++---- .../core/action/TransportXPackInfoAction.java | 11 +++++----- .../TransportGetCertificateInfoAction.java | 6 ++--- .../action/TransportXPackInfoActionTests.java | 9 ++++---- .../action/TransportGraphExploreAction.java | 8 +++---- .../ml/action/TransportCloseJobAction.java | 4 +++- .../action/TransportDeleteCalendarAction.java | 9 ++++---- .../TransportDeleteCalendarEventAction.java | 8 +++---- 
.../TransportDeleteExpiredDataAction.java | 4 +++- .../action/TransportDeleteFilterAction.java | 7 +++--- .../TransportDeleteModelSnapshotAction.java | 6 ++--- .../ml/action/TransportFlushJobAction.java | 4 ++-- .../ml/action/TransportForecastJobAction.java | 4 ++-- .../ml/action/TransportGetBucketsAction.java | 11 +++++----- .../TransportGetCalendarEventsAction.java | 7 +++--- .../action/TransportGetCalendarsAction.java | 5 ++--- .../action/TransportGetCategoriesAction.java | 9 ++++---- .../ml/action/TransportGetFiltersAction.java | 5 ++--- .../action/TransportGetInfluencersAction.java | 11 +++++----- .../action/TransportGetJobsStatsAction.java | 4 ++-- .../TransportGetModelSnapshotsAction.java | 5 ++--- .../TransportGetOverallBucketsAction.java | 4 +++- .../ml/action/TransportGetRecordsAction.java | 11 +++++----- .../TransportIsolateDatafeedAction.java | 7 +++--- .../ml/action/TransportJobTaskAction.java | 7 +++--- .../ml/action/TransportKillProcessAction.java | 7 +++--- .../ml/action/TransportMlInfoAction.java | 5 ++--- .../ml/action/TransportPersistJobAction.java | 4 ++-- .../TransportPostCalendarEventsAction.java | 5 ++--- .../ml/action/TransportPostDataAction.java | 4 ++-- .../TransportPreviewDatafeedAction.java | 4 +++- .../ml/action/TransportPutCalendarAction.java | 7 +++--- .../ml/action/TransportPutFilterAction.java | 10 ++++----- .../action/TransportStopDatafeedAction.java | 4 +++- .../TransportUpdateCalendarJobAction.java | 5 ++--- .../TransportUpdateModelSnapshotAction.java | 5 ++--- .../action/TransportUpdateProcessAction.java | 7 +++--- .../TransportValidateDetectorAction.java | 10 ++++----- .../TransportValidateJobConfigAction.java | 10 ++++----- .../action/TransportMonitoringBulkAction.java | 4 +++- .../action/TransportGetRollupCapsAction.java | 5 ++--- .../action/TransportGetRollupJobAction.java | 4 ++-- .../action/TransportRollupSearchAction.java | 4 ++-- .../action/TransportStartRollupAction.java | 4 ++-- .../action/TransportStopRollupAction.java | 4 ++-- .../role/TransportDeleteRoleAction.java | 5 ++--- .../action/role/TransportGetRolesAction.java | 5 ++--- .../action/role/TransportPutRoleAction.java | 5 ++--- .../TransportDeleteRoleMappingAction.java | 5 ++--- .../TransportGetRoleMappingsAction.java | 13 +++++------ .../TransportPutRoleMappingAction.java | 5 ++--- .../saml/TransportSamlAuthenticateAction.java | 4 +++- .../TransportSamlInvalidateSessionAction.java | 5 ++--- .../saml/TransportSamlLogoutAction.java | 5 ++--- ...nsportSamlPrepareAuthenticationAction.java | 9 ++++---- .../token/TransportCreateTokenAction.java | 4 +++- .../token/TransportInvalidateTokenAction.java | 5 ++--- .../token/TransportRefreshTokenAction.java | 5 ++--- .../user/TransportAuthenticateAction.java | 9 ++++---- .../user/TransportChangePasswordAction.java | 5 ++--- .../user/TransportDeleteUserAction.java | 9 ++++---- .../action/user/TransportGetUsersAction.java | 5 ++--- .../user/TransportHasPrivilegesAction.java | 4 +++- .../action/user/TransportPutUserAction.java | 5 ++--- .../user/TransportSetEnabledAction.java | 4 +++- .../role/TransportDeleteRoleActionTests.java | 7 +++--- .../role/TransportGetRolesActionTests.java | 9 ++++---- .../role/TransportPutRoleActionTests.java | 10 +++------ .../TransportGetRoleMappingsActionTests.java | 16 ++++++-------- .../TransportPutRoleMappingActionTests.java | 16 ++++++-------- ...sportSamlInvalidateSessionActionTests.java | 3 +-- .../saml/TransportSamlLogoutActionTests.java | 2 +- .../TransportAuthenticateActionTests.java | 7 +++--- 
.../TransportChangePasswordActionTests.java | 9 ++++---- .../user/TransportDeleteUserActionTests.java | 16 ++++++-------- .../user/TransportGetUsersActionTests.java | 12 +++++----- .../user/TransportPutUserActionTests.java | 12 +++++----- .../plugin/TransportSqlClearCursorAction.java | 5 ++--- .../sql/plugin/TransportSqlQueryAction.java | 5 ++--- .../plugin/TransportSqlTranslateAction.java | 9 +++----- .../actions/WatcherTransportAction.java | 4 ++-- .../actions/ack/TransportAckWatchAction.java | 5 ++--- .../TransportActivateWatchAction.java | 8 +++---- .../delete/TransportDeleteWatchAction.java | 10 ++++----- .../execute/TransportExecuteWatchAction.java | 4 +++- .../actions/get/TransportGetWatchAction.java | 5 ++--- .../actions/put/TransportPutWatchAction.java | 4 +++- .../ack/TransportAckWatchActionTests.java | 2 +- 146 files changed, 415 insertions(+), 483 deletions(-) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java index 0c1065ad13145..0f6748b5e826c 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java @@ -19,8 +19,8 @@ package org.elasticsearch.plugin.noop.action.bulk; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; @@ -30,7 +30,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; public class TransportNoopBulkAction extends HandledTransportAction { @@ -38,9 +37,8 @@ public class TransportNoopBulkAction extends HandledTransportAction { @Inject - public TransportNoopSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters) { - super(settings, NoopSearchAction.NAME, threadPool, transportService, actionFilters, - (Writeable.Reader) SearchRequest::new); + public TransportNoopSearchAction(Settings settings, TransportService transportService, ActionFilters actionFilters) { + super(settings, NoopSearchAction.NAME, transportService, actionFilters, (Writeable.Reader) SearchRequest::new); } @Override diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 77ad363b50680..85a8f5e48079c 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -41,7 +41,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestBuilderListener; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import 
java.io.IOException; @@ -114,9 +113,8 @@ public void writeTo(StreamOutput out) throws IOException { public static class TransportAction extends HandledTransportAction { @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters) { - super(settings, NAME, threadPool, transportService, actionFilters, Request::new); + public TransportAction(Settings settings, TransportService transportService, ActionFilters actionFilters) { + super(settings, NAME, transportService, actionFilters, Request::new); } @Override diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java index 7451c89cdb494..79fea3c6d62cd 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; @@ -45,10 +44,10 @@ public class TransportMultiSearchTemplateAction extends HandledTransportAction) SearchTemplateRequest::new); + public TransportSearchTemplateAction(Settings settings, TransportService transportService, ActionFilters actionFilters, + ScriptService scriptService, NamedXContentRegistry xContentRegistry, NodeClient client) { + super(settings, SearchTemplateAction.NAME, transportService, actionFilters, + (Supplier) SearchTemplateRequest::new); this.scriptService = scriptService; this.xContentRegistry = xContentRegistry; this.client = client; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index ea0664b2aa446..1bfd013b0d5a5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -48,7 +48,6 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -280,9 +279,9 @@ public static class TransportAction extends HandledTransportAction) RankEvalRequest::new); this.scriptService = scriptService; this.namedXContentRegistry = namedXContentRegistry; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index 35aa8d77d104e..9be54f4f76104 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.reindex; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.HandledTransportAction; @@ -35,7 +33,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.function.Supplier; + public class TransportDeleteByQueryAction extends HandledTransportAction { + + private final ThreadPool threadPool; private final Client client; private final ScriptService scriptService; private final ClusterService clusterService; @@ -43,8 +45,9 @@ public class TransportDeleteByQueryAction extends HandledTransportAction) DeleteByQueryRequest::new); + this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; this.clusterService = clusterService; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 3db3a0d2a9123..62be1e2cb613a 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -92,6 +92,7 @@ public class TransportReindexAction extends HandledTransportAction> REMOTE_CLUSTER_WHITELIST = Setting.listSetting("reindex.remote.whitelist", emptyList(), Function.identity(), Property.NodeScope); + private final ThreadPool threadPool; private final ClusterService clusterService; private final ScriptService scriptService; private final AutoCreateIndex autoCreateIndex; @@ -103,8 +104,8 @@ public class TransportReindexAction extends HandledTransportAction { + + private final ThreadPool threadPool; private final Client client; private final ScriptService scriptService; private final ClusterService clusterService; @@ -53,8 +55,9 @@ public class TransportUpdateByQueryAction extends HandledTransportAction) UpdateByQueryRequest::new); + this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; this.clusterService = clusterService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index 3bd451538f0a3..b99630dd4f960 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -62,9 +62,9 @@ public class TransportCancelTasksAction extends TransportTasksAction */ public class TransportGetTaskAction extends HandledTransportAction { + private final ThreadPool threadPool; private final ClusterService clusterService; private final TransportService transportService; private final Client client; @@ -72,7 +73,8 @@ public class TransportGetTaskAction extends HandledTransportAction) RemoteInfoRequest::new); this.remoteClusterService = searchTransportService.getRemoteClusterService(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index eda82fb710ca0..4609f048caa83 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -49,10 
+49,10 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc private final IndicesService indicesService; @Inject - public TransportClearIndicesCacheAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportClearIndicesCacheAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, ClearIndicesCacheAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, ClearIndicesCacheAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, ClearIndicesCacheRequest::new, ThreadPool.Names.MANAGEMENT, false); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index 91755388320a3..7df54c1f123a1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.List; @@ -39,11 +38,10 @@ public class TransportFlushAction extends TransportBroadcastReplicationAction { @Inject - public TransportFlushAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, + public TransportFlushAction(Settings settings, ClusterService clusterService, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportShardFlushAction replicatedFlushAction) { - super(FlushAction.NAME, FlushRequest::new, settings, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedFlushAction); + super(FlushAction.NAME, FlushRequest::new, settings, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedFlushAction); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java index fb4928ab0d4d3..9762fe6cbb814 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.flush.SyncedFlushService; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; /** @@ -38,9 +37,9 @@ public class TransportSyncedFlushAction extends HandledTransportAction) SyncedFlushRequest::new); this.syncedFlushService = syncedFlushService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java index 94f27a93624d5..94357575a9f72 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java @@ -48,10 +48,10 @@ public class TransportForceMergeAction extends TransportBroadcastByNodeAction { @Inject - public TransportRefreshAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportRefreshAction(Settings settings, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportShardRefreshAction shardRefreshAction) { - super(RefreshAction.NAME, RefreshRequest::new, settings, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, shardRefreshAction); + super(RefreshAction.NAME, RefreshRequest::new, settings, clusterService, transportService, actionFilters, indexNameExpressionResolver, shardRefreshAction); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java index 94b12c9ab17d5..6b624e6baa792 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java @@ -46,9 +46,9 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastByNodeActi private final IndicesService indicesService; @Inject - public TransportIndicesSegmentsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, + public TransportIndicesSegmentsAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, IndicesSegmentsAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + super(settings, IndicesSegmentsAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, IndicesSegmentsRequest::new, ThreadPool.Names.MANAGEMENT); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 6f2aaa063011f..9668a1a41fac5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -47,10 +47,10 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction< private final IndicesService indicesService; @Inject - public TransportIndicesStatsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportIndicesStatsAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, 
IndicesStatsAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + super(settings, IndicesStatsAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, IndicesStatsRequest::new, ThreadPool.Names.MANAGEMENT); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java index 19566acaf7af4..603b25f6ab414 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java @@ -48,9 +48,9 @@ public class TransportUpgradeStatusAction extends TransportBroadcastByNodeAction private final IndicesService indicesService; @Inject - public TransportUpgradeStatusAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, + public TransportUpgradeStatusAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, UpgradeStatusAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + super(settings, UpgradeStatusAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, UpgradeStatusRequest::new, ThreadPool.Names.MANAGEMENT); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index 0bc2134cb505a..dda4a5203ff68 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -62,10 +62,10 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction { + private final ThreadPool threadPool; private final AutoCreateIndex autoCreateIndex; private final ClusterService clusterService; private final IngestService ingestService; @@ -108,8 +109,9 @@ public TransportBulkAction(Settings settings, ThreadPool threadPool, TransportSe TransportShardBulkAction shardBulkAction, NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, AutoCreateIndex autoCreateIndex, LongSupplier relativeTimeProvider) { - super(settings, BulkAction.NAME, threadPool, transportService, actionFilters, BulkRequest::new); + super(settings, BulkAction.NAME, transportService, actionFilters, BulkRequest::new); Objects.requireNonNull(relativeTimeProvider); + this.threadPool = threadPool; this.clusterService = clusterService; this.ingestService = ingestService; this.shardBulkAction = shardBulkAction; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index a1f0965d110b2..7fc58b667c579 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ 
-76,6 +76,7 @@ public class TransportShardBulkAction extends TransportWriteAction { + private final ThreadPool threadPool; private final ClusterService clusterService; private final TransportFieldCapabilitiesIndexAction shardAction; private final RemoteClusterService remoteClusterService; @@ -53,7 +54,8 @@ public TransportFieldCapabilitiesAction(Settings settings, TransportService tran ClusterService clusterService, ThreadPool threadPool, TransportFieldCapabilitiesIndexAction shardAction, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, FieldCapabilitiesAction.NAME, threadPool, transportService, actionFilters, FieldCapabilitiesRequest::new); + super(settings, FieldCapabilitiesAction.NAME, transportService, actionFilters, FieldCapabilitiesRequest::new); + this.threadPool = threadPool; this.clusterService = clusterService; this.remoteClusterService = transportService.getRemoteClusterService(); this.shardAction = shardAction; diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java index 0e54539d885c4..f7ad0f6c87fd0 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -44,10 +43,10 @@ public class TransportMultiGetAction extends HandledTransportAction) SimulatePipelineRequest::new); this.pipelineStore = nodeService.getIngestService().getPipelineStore(); this.executionService = new SimulateExecutionService(threadPool); diff --git a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java index 18e704be69c24..48612a68901dd 100644 --- a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java +++ b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; public class TransportMainAction extends HandledTransportAction { @@ -38,9 +37,9 @@ public class TransportMainAction extends HandledTransportAction { @@ -34,10 +33,10 @@ public class TransportClearScrollAction extends HandledTransportAction { private final int availableProcessors; + private final ThreadPool threadPool; private final ClusterService clusterService; private final LongSupplier relativeTimeProvider; private final NodeClient client; @@ -49,7 +50,8 @@ public class TransportMultiSearchAction extends HandledTransportAction SHARD_COUNT_LIMIT_SETTING = Setting.longSetting( "action.search.shard_count.limit", Long.MAX_VALUE, 1L, Property.Dynamic, Property.NodeScope); + private final ThreadPool threadPool; private final ClusterService clusterService; private final SearchTransportService searchTransportService; private final RemoteClusterService remoteClusterService; @@ -82,8 +83,8 @@ public TransportSearchAction(Settings settings, ThreadPool threadPool, Transport 
SearchTransportService searchTransportService, SearchPhaseController searchPhaseController, ClusterService clusterService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, SearchAction.NAME, threadPool, transportService, actionFilters, - (Writeable.Reader) SearchRequest::new); + super(settings, SearchAction.NAME, transportService, actionFilters, (Writeable.Reader) SearchRequest::new); + this.threadPool = threadPool; this.searchPhaseController = searchPhaseController; this.searchTransportService = searchTransportService; this.remoteClusterService = searchTransportService.getRemoteClusterService(); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 77425ecd5dbb2..953152eaad003 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import static org.elasticsearch.action.search.ParsedScrollId.QUERY_AND_FETCH_TYPE; @@ -41,10 +40,10 @@ public class TransportSearchScrollAction extends HandledTransportAction) SearchScrollRequest::new); this.clusterService = clusterService; this.searchTransportService = searchTransportService; diff --git a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index d6febf828765b..7cdcd017b9946 100644 --- a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -37,29 +37,27 @@ */ public abstract class HandledTransportAction extends TransportAction { - protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, - Supplier request) { - this(settings, actionName, true, threadPool, transportService, actionFilters, request); + protected HandledTransportAction(Settings settings, String actionName, TransportService transportService, + ActionFilters actionFilters, Supplier request) { + this(settings, actionName, true, transportService, actionFilters, request); } - protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, + protected HandledTransportAction(Settings settings, String actionName, TransportService transportService, ActionFilters actionFilters, Writeable.Reader requestReader) { - this(settings, actionName, true, threadPool, transportService, actionFilters, requestReader); + this(settings, actionName, true, transportService, actionFilters, requestReader); } - protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - Supplier request) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, + TransportService 
transportService, ActionFilters actionFilters, Supplier request) { + super(settings, actionName, actionFilters, transportService.getTaskManager()); transportService.registerRequestHandler(actionName, request, ThreadPool.Names.SAME, false, canTripCircuitBreaker, new TransportHandler()); } - protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, ThreadPool threadPool, + protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, TransportService transportService, ActionFilters actionFilters, Writeable.Reader requestReader) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + super(settings, actionName, actionFilters, transportService.getTaskManager()); transportService.registerRequestHandler(actionName, ThreadPool.Names.SAME, false, canTripCircuitBreaker, requestReader, new TransportHandler()); } diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java index c8d9849c2e58a..85167cfe0f8e9 100644 --- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -29,21 +29,17 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskListener; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.threadpool.ThreadPool; import java.util.concurrent.atomic.AtomicInteger; public abstract class TransportAction extends AbstractComponent { - protected final ThreadPool threadPool; protected final String actionName; private final ActionFilter[] filters; protected final TaskManager taskManager; - protected TransportAction(Settings settings, String actionName, ThreadPool threadPool, ActionFilters actionFilters, - TaskManager taskManager) { + protected TransportAction(Settings settings, String actionName, ActionFilters actionFilters, TaskManager taskManager) { super(settings); - this.threadPool = threadPool; this.actionName = actionName; this.filters = actionFilters.filters(); this.taskManager = taskManager; diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index ff8012f8e37fb..8a28c2c9d891d 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -58,10 +58,10 @@ public abstract class TransportBroadcastAction request, Supplier shardRequest, String shardExecutor) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; this.indexNameExpressionResolver = indexNameExpressionResolver; diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index ca50e2acd147e..dac1a55b6361f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java 
@@ -88,21 +88,18 @@ public abstract class TransportBroadcastByNodeAction request, String executor) { - this(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, request, - executor, true); + this(settings, actionName, clusterService, transportService, actionFilters, indexNameExpressionResolver, request, executor, true); } public TransportBroadcastByNodeAction( Settings settings, String actionName, - ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, @@ -110,8 +107,7 @@ public TransportBroadcastByNodeAction( Supplier request, String executor, boolean canTripCircuitBreaker) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, - request); + super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 1881db0f13e42..8f198c4b82e6f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -54,6 +54,7 @@ * A base class for operations that needs to be performed on the master node. */ public abstract class TransportMasterNodeAction, Response extends ActionResponse> extends HandledTransportAction { + protected final ThreadPool threadPool; protected final TransportService transportService; protected final ClusterService clusterService; protected final IndexNameExpressionResolver indexNameExpressionResolver; @@ -75,10 +76,10 @@ protected TransportMasterNodeAction(Settings settings, String actionName, Transp protected TransportMasterNodeAction(Settings settings, String actionName, boolean canTripCircuitBreaker, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier request) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, - request); + super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request); this.transportService = transportService; this.clusterService = clusterService; + this.threadPool = threadPool; this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); } @@ -87,10 +88,10 @@ protected TransportMasterNodeAction(Settings settings, String actionName, boolea TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, Writeable.Reader request, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request - ); + super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request); this.transportService = transportService; this.clusterService = clusterService; + this.threadPool = threadPool; this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); } diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java 
b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index d47e156680e28..7a074c91c7152 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -54,6 +54,7 @@ public abstract class TransportNodesAction extends HandledTransportAction { + protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final TransportService transportService; protected final Class nodeResponseClass; @@ -64,7 +65,8 @@ protected TransportNodesAction(Settings settings, String actionName, ThreadPool ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, Supplier request, Supplier nodeRequest, String nodeExecutor, Class nodeResponseClass) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); + this.threadPool = threadPool; this.clusterService = Objects.requireNonNull(clusterService); this.transportService = Objects.requireNonNull(transportService); this.nodeResponseClass = Objects.requireNonNull(nodeResponseClass); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index d3d54880f504f..50e0cc3af7f7b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -38,7 +38,6 @@ import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; @@ -58,10 +57,10 @@ public abstract class TransportBroadcastReplicationAction request, Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportBroadcastReplicationAction(String name, Supplier request, Settings settings, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportReplicationAction replicatedBroadcastShardAction) { - super(settings, name, threadPool, transportService, actionFilters, request); + super(settings, name, transportService, actionFilters, request); this.replicatedBroadcastShardAction = replicatedBroadcastShardAction; this.clusterService = clusterService; this.indexNameExpressionResolver = indexNameExpressionResolver; diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 97f985806168b..d7c908bf9fa5b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -100,6 +100,7 @@ public abstract class TransportReplicationAction< Response extends ReplicationResponse > extends TransportAction { + protected final ThreadPool threadPool; protected final TransportService transportService; protected final ClusterService 
clusterService; protected final ShardStateAction shardStateAction; @@ -132,7 +133,8 @@ protected TransportReplicationAction(Settings settings, String actionName, Trans IndexNameExpressionResolver indexNameExpressionResolver, Supplier request, Supplier replicaRequest, String executor, boolean syncGlobalCheckpointAfterOperation) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + super(settings, actionName, actionFilters, transportService.getTaskManager()); + this.threadPool = threadPool; this.transportService = transportService; this.clusterService = clusterService; this.indicesService = indicesService; diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index c907c12ac5161..280a35207a9db 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -50,6 +50,8 @@ public abstract class TransportInstanceSingleOperationAction, Response extends ActionResponse> extends HandledTransportAction { + + protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final TransportService transportService; protected final IndexNameExpressionResolver indexNameExpressionResolver; @@ -60,7 +62,8 @@ public abstract class TransportInstanceSingleOperationAction request) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); + this.threadPool = threadPool; this.clusterService = clusterService; this.transportService = transportService; this.indexNameExpressionResolver = indexNameExpressionResolver; diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index 6c5d55c8c4404..d7e5633559d8a 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -60,6 +60,7 @@ */ public abstract class TransportSingleShardAction, Response extends ActionResponse> extends TransportAction { + protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final TransportService transportService; protected final IndexNameExpressionResolver indexNameExpressionResolver; @@ -70,7 +71,8 @@ public abstract class TransportSingleShardAction request, String executor) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + super(settings, actionName, actionFilters, transportService.getTaskManager()); + this.threadPool = threadPool; this.clusterService = clusterService; this.transportService = transportService; this.indexNameExpressionResolver = indexNameExpressionResolver; diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index f852b5efb1aa3..5599dd5f98b06 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -77,10 +77,10 @@ public abstract class TransportTasksAction< protected final String transportNodeAction; - protected TransportTasksAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, + protected TransportTasksAction(Settings settings, String actionName, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, Supplier requestSupplier, Supplier responseSupplier, String nodeExecutor) { - super(settings, actionName, threadPool, transportService, actionFilters, requestSupplier); + super(settings, actionName, transportService, actionFilters, requestSupplier); this.clusterService = clusterService; this.transportService = transportService; this.transportNodeAction = actionName + "[n]"; diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index 9a3fc7b84c287..f66d843ea6db4 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -45,10 +44,10 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction { - protected FakeTransportAction(Settings settings, String actionName, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, TaskManager taskManager) { - super(settings, actionName, threadPool, actionFilters, taskManager); + protected FakeTransportAction(Settings settings, String actionName, ActionFilters actionFilters, TaskManager taskManager) { + super(settings, actionName, actionFilters, taskManager); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 4cb9cd27e7fc7..20f4987008c53 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -192,9 +192,8 @@ protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool clusterService = createClusterService(threadPool, discoveryNode.get()); clusterService.addStateApplier(transportService.getTaskManager()); ActionFilters actionFilters = new ActionFilters(emptySet()); - transportListTasksAction = new TransportListTasksAction(settings, threadPool, clusterService, transportService, actionFilters); - transportCancelTasksAction = new TransportCancelTasksAction(settings, threadPool, clusterService, - transportService, actionFilters); + transportListTasksAction = new TransportListTasksAction(settings, clusterService, transportService, actionFilters); + transportCancelTasksAction = new TransportCancelTasksAction(settings, clusterService, transportService, actionFilters); transportService.acceptIncomingRequests(); } diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index 0cfe532b8a012..a04c8d93c3a8c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -424,12 +424,9 @@ public static class TransportUnblockTestTasksAction extends TransportTasksAction UnblockTestTasksResponse, UnblockTestTaskResponse> { @Inject - public TransportUnblockTestTasksAction(Settings settings,ThreadPool threadPool, ClusterService - clusterService, - TransportService transportService) { - super(settings, UnblockTestTasksAction.NAME, threadPool, clusterService, transportService, new ActionFilters(new - HashSet<>()), - UnblockTestTasksRequest::new, UnblockTestTasksResponse::new, ThreadPool.Names.MANAGEMENT); + public TransportUnblockTestTasksAction(Settings settings, ClusterService clusterService, TransportService transportService) { + super(settings, UnblockTestTasksAction.NAME, clusterService, transportService, new ActionFilters(new HashSet<>()), + UnblockTestTasksRequest::new, UnblockTestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 33b815e4fbf22..9175bc69bf642 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -254,9 +254,9 @@ public void writeTo(StreamOutput out) throws IOException { */ abstract static class TestTasksAction extends TransportTasksAction { - protected TestTasksAction(Settings settings, String actionName, ThreadPool threadPool, + protected TestTasksAction(Settings settings, String actionName, ClusterService clusterService, TransportService transportService) { - super(settings, actionName, threadPool, clusterService, transportService, new ActionFilters(new HashSet<>()), + super(settings, actionName, clusterService, transportService, new ActionFilters(new HashSet<>()), TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } @@ -622,7 +622,7 @@ public void testTaskLevelActionFailures() throws ExecutionException, Interrupted for (int i = 0; i < testNodes.length; i++) { final int node = i; // Simulate task action that fails on one of the tasks on one of the nodes - tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", threadPool, testNodes[i].clusterService, + tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", testNodes[i].clusterService, testNodes[i].transportService) { @Override protected void taskOperation(TestTasksRequest request, Task task, ActionListener listener) { @@ -701,7 +701,7 @@ public void testTaskNodeFiltering() throws ExecutionException, InterruptedExcept final int node = i; // Simulate a task action that works on all nodes except nodes listed in filterNodes. // We are testing that it works. 
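            // Illustrative note, not part of the original patch: TransportTasksAction subclasses follow the
            // same constructor change as the TransportListTasksAction and TransportCancelTasksAction
            // constructions above, dropping only the ThreadPool argument. Assuming a hypothetical
            // MyTasksRequest/MyTasksResponse pair, such an action would now call super as:
            //
            //     super(settings, "cluster:admin/my_tasks", clusterService, transportService,
            //         new ActionFilters(new HashSet<>()), MyTasksRequest::new, MyTasksResponse::new,
            //         ThreadPool.Names.MANAGEMENT);
            //
            // The node-level executor is still selected by name (the final argument); only the injected
            // ThreadPool parameter has been removed.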
- tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", threadPool, + tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", testNodes[i].clusterService, testNodes[i].transportService) { @Override diff --git a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java index 1c1c0f9476de3..654a4a3649c35 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.Collections; @@ -68,8 +67,7 @@ public void testMainActionClusterAvailable() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportMainAction action = new TransportMainAction(settings, mock(ThreadPool.class), transportService, mock(ActionFilters.class), - clusterService); + TransportMainAction action = new TransportMainAction(settings, transportService, mock(ActionFilters.class), clusterService); AtomicReference responseRef = new AtomicReference<>(); action.doExecute(new MainRequest(), new ActionListener() { @Override diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 3a31422dcf83f..479ed2ad60d51 100644 --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -80,7 +80,7 @@ public void testActionFiltersRequest() throws ExecutionException, InterruptedExc String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = - new TransportAction(Settings.EMPTY, actionName, null, actionFilters, + new TransportAction(Settings.EMPTY, actionName, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override protected void doExecute(TestRequest request, ActionListener listener) { @@ -158,7 +158,7 @@ public void exe String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = new TransportAction(Settings.EMPTY, - actionName, null, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { + actionName, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override protected void doExecute(TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 6a7d443553888..61beb59bc0c24 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ 
-118,7 +118,7 @@ class TestTransportBroadcastByNodeAction extends TransportBroadcastByNodeAction< private final Map shards = new HashMap<>(); TestTransportBroadcastByNodeAction(Settings settings, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier request, String executor) { - super(settings, "indices:admin/test", THREAD_POOL, TransportBroadcastByNodeActionTests.this.clusterService, transportService, actionFilters, indexNameExpressionResolver, request, executor); + super(settings, "indices:admin/test", TransportBroadcastByNodeActionTests.this.clusterService, transportService, actionFilters, indexNameExpressionResolver, request, executor); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index f3033b017db98..bfa45bb072dcf 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -100,7 +100,7 @@ threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService, new NamedWr TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet()); transportService.start(); transportService.acceptIncomingRequests(); - broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, threadPool, clusterService, transportService, + broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, clusterService, transportService, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), null); } @@ -206,10 +206,10 @@ public void testShardsList() throws InterruptedException, ExecutionException { private class TestBroadcastReplicationAction extends TransportBroadcastReplicationAction { protected final Set>> capturedShardRequests = ConcurrentCollections.newConcurrentSet(); - TestBroadcastReplicationAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - TransportReplicationAction replicatedBroadcastShardAction) { - super("test-broadcast-replication-action", DummyBroadcastRequest::new, settings, threadPool, clusterService, transportService, + TestBroadcastReplicationAction(Settings settings, ClusterService clusterService, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + TransportReplicationAction replicatedBroadcastShardAction) { + super("test-broadcast-replication-action", DummyBroadcastRequest::new, settings, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction); } diff --git a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index a289e9680b4aa..f473188a5424b 100644 --- a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -59,7 +59,7 @@ private Actions(Settings settings, ThreadPool threadPool, Action[] actions) { private static class InternalTransportAction extends TransportAction { private 
InternalTransportAction(Settings settings, String actionName, ThreadPool threadPool) { - super(settings, actionName, threadPool, EMPTY_FILTERS, new TaskManager(settings, threadPool, Collections.emptySet())); + super(settings, actionName, EMPTY_FILTERS, new TaskManager(settings, threadPool, Collections.emptySet())); } @Override diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java index e54641bef2f54..745b883656958 100644 --- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java +++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java @@ -34,7 +34,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; @@ -511,10 +510,9 @@ public static class TransportTestTaskAction extends TransportTasksAction { @Inject - public TransportTestTaskAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, String nodeExecutor) { - super(settings, TestTaskAction.NAME, threadPool, clusterService, transportService, actionFilters, + public TransportTestTaskAction(Settings settings, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters) { + super(settings, TestTaskAction.NAME, clusterService, transportService, actionFilters, TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java index 7acbfa49368de..23dd0e12d44ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java @@ -10,15 +10,14 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackInfoResponse; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.license.XPackInfoResponse; +import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.license.XPackInfoResponse.LicenseInfo; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackBuild; import org.elasticsearch.xpack.core.XPackFeatureSet; -import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; -import org.elasticsearch.license.XPackInfoResponse.LicenseInfo; import java.util.Set; import java.util.stream.Collectors; @@ -29,9 +28,9 @@ public class TransportXPackInfoAction extends HandledTransportAction featureSets; @Inject - public TransportXPackInfoAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportXPackInfoAction(Settings settings, 
TransportService transportService, ActionFilters actionFilters, LicenseService licenseService, Set featureSets) { - super(settings, XPackInfoAction.NAME, threadPool, transportService, actionFilters, + super(settings, XPackInfoAction.NAME, transportService, actionFilters, XPackInfoRequest::new); this.licenseService = licenseService; this.featureSets = featureSets; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java index 16e2a74dac81a..a70d0693d5b37 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo; @@ -25,10 +24,9 @@ public class TransportGetCertificateInfoAction extends HandledTransportAction null, null, Collections.emptySet()); - TransportXPackInfoAction action = new TransportXPackInfoAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), licenseService, featureSets); + TransportXPackInfoAction action = new TransportXPackInfoAction(Settings.EMPTY, transportService, + mock(ActionFilters.class), licenseService, featureSets); License license = mock(License.class); long expiryDate = randomLong(); diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 1c1dfb476da7d..d45dd640a49ff 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -65,6 +65,7 @@ */ public class TransportGraphExploreAction extends HandledTransportAction { + private final ThreadPool threadPool; private final NodeClient client; protected final XPackLicenseState licenseState; @@ -83,10 +84,9 @@ protected boolean lessThan(Vertex a, Vertex b) { @Inject public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, NodeClient client, - TransportService transportService, ActionFilters actionFilters, - XPackLicenseState licenseState) { - super(settings, GraphExploreAction.NAME, threadPool, transportService, actionFilters, - (Supplier)GraphExploreRequest::new); + TransportService transportService, ActionFilters actionFilters, XPackLicenseState licenseState) { + super(settings, GraphExploreAction.NAME, transportService, actionFilters, (Supplier)GraphExploreRequest::new); + this.threadPool = threadPool; this.client = client; this.licenseState = licenseState; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java index bc1d50c7cd99d..05810b943befb 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -57,6 +57,7 @@ public class TransportCloseJobAction extends TransportTasksAction { + private final ThreadPool threadPool; private final Client client; private final ClusterService clusterService; private final Auditor auditor; @@ -67,8 +68,9 @@ public TransportCloseJobAction(Settings settings, TransportService transportServ ClusterService clusterService, Client client, Auditor auditor, PersistentTasksService persistentTasksService) { // We fork in innerTaskOperation(...), so we can use ThreadPool.Names.SAME here: - super(settings, CloseJobAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, CloseJobAction.NAME, clusterService, transportService, actionFilters, CloseJobAction.Request::new, CloseJobAction.Response::new, ThreadPool.Names.SAME); + this.threadPool = threadPool; this.client = client; this.clusterService = clusterService; this.auditor = auditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java index 0346e38deb2fa..38d88341ce3de 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; @@ -20,7 +18,6 @@ import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction; @@ -28,6 +25,8 @@ import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import java.util.function.Supplier; + import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -38,9 +37,9 @@ public class TransportDeleteCalendarAction extends HandledTransportAction) DeleteCalendarAction.Request::new); this.client = client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 2e4b688fa2619..7b2311eba2d2d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; 
import org.elasticsearch.xpack.core.ml.action.DeleteCalendarEventAction; @@ -41,10 +40,9 @@ public class TransportDeleteCalendarEventAction extends HandledTransportAction { + private final ThreadPool threadPool; private final Client client; private final ClusterService clusterService; @Inject public TransportDeleteExpiredDataAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, Client client, ClusterService clusterService) { - super(settings, DeleteExpiredDataAction.NAME, threadPool, transportService, actionFilters, DeleteExpiredDataAction.Request::new); + super(settings, DeleteExpiredDataAction.NAME, transportService, actionFilters, DeleteExpiredDataAction.Request::new); + this.threadPool = threadPool; this.client = ClientHelper.clientWithOrigin(client, ClientHelper.ML_ORIGIN); this.clusterService = clusterService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java index 4987c028696c6..79693e2279486 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java @@ -20,11 +20,10 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.DeleteFilterAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.action.DeleteFilterAction; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; @@ -44,9 +43,9 @@ public class TransportDeleteFilterAction extends HandledTransportAction) DeleteFilterAction.Request::new); this.clusterService = clusterService; this.client = client; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java index 23ca3693df632..ad22f84f6d468 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -37,10 +36,9 @@ public class TransportDeleteModelSnapshotAction extends HandledTransportAction { @Inject - public TransportFlushJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportFlushJobAction(Settings settings, TransportService transportService, ClusterService clusterService, ActionFilters actionFilters, AutodetectProcessManager processManager) { - super(settings, FlushJobAction.NAME, 
threadPool, clusterService, transportService, actionFilters, + super(settings, FlushJobAction.NAME, clusterService, transportService, actionFilters, FlushJobAction.Request::new, FlushJobAction.Response::new, ThreadPool.Names.SAME, processManager); // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java index 8b287db50381c..f42f7003b909c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java @@ -42,10 +42,10 @@ public class TransportForecastJobAction extends TransportJobTaskAction { private final JobProvider jobProvider; @@ -27,9 +26,9 @@ public class TransportGetBucketsAction extends HandledTransportAction) GetBucketsAction.Request::new); this.jobProvider = jobProvider; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java index da2d2d7970fc6..5647c72d44bd2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.GetCalendarEventsAction; @@ -20,9 +19,9 @@ import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.persistence.ScheduledEventsQueryBuilder; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.ScheduledEventsQueryBuilder; import java.util.Collections; import java.util.List; @@ -35,9 +34,9 @@ public class TransportGetCalendarEventsAction extends HandledTransportAction) GetCalendarEventsAction.Request::new); this.jobProvider = jobProvider; this.clusterService = clusterService; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java index 5645d1e1f2d26..46c252004a3c5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetCalendarsAction; import 
org.elasticsearch.xpack.core.ml.action.util.PageParams; @@ -26,9 +25,9 @@ public class TransportGetCalendarsAction extends HandledTransportAction { private final JobProvider jobProvider; @@ -26,9 +25,9 @@ public class TransportGetCategoriesAction extends HandledTransportAction) GetCategoriesAction.Request::new); this.jobProvider = jobProvider; this.client = client; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index 28d75956df059..1be7be4a5d2b3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; @@ -49,9 +48,9 @@ public class TransportGetFiltersAction extends HandledTransportAction { private final JobProvider jobProvider; @@ -27,9 +26,9 @@ public class TransportGetInfluencersAction extends HandledTransportAction) GetInfluencersAction.Request::new); this.jobProvider = jobProvider; this.client = client; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index c0b383b55ced0..1182953dfc31e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -53,10 +53,10 @@ public class TransportGetJobsStatsAction extends TransportTasksAction) GetOverallBucketsAction.Request::new); + this.threadPool = threadPool; this.clusterService = clusterService; this.client = client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java index 7c5fee97d5647..6943cd9a01c5e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java @@ -5,20 +5,19 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; -import org.elasticsearch.xpack.ml.job.persistence.RecordsQueryBuilder; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.RecordsQueryBuilder; + +import 
java.util.function.Supplier; public class TransportGetRecordsAction extends HandledTransportAction { @@ -27,9 +26,9 @@ public class TransportGetRecordsAction extends HandledTransportAction) GetRecordsAction.Request::new); this.jobProvider = jobProvider; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java index 0d3b8dfa38dbe..398a1007ff9c4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java @@ -18,12 +18,11 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.IsolateDatafeedAction; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -33,9 +32,9 @@ public class TransportIsolateDatafeedAction extends TransportTasksAction { @Inject - public TransportIsolateDatafeedAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportIsolateDatafeedAction(Settings settings, TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(settings, IsolateDatafeedAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, IsolateDatafeedAction.NAME, clusterService, transportService, actionFilters, IsolateDatafeedAction.Request::new, IsolateDatafeedAction.Response::new, MachineLearning.UTILITY_THREAD_POOL_NAME); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java index 881a5e7cc5b4b..bd489588da3c3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java @@ -15,13 +15,12 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.JobTaskRequest; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; @@ -39,11 +38,11 @@ public abstract class TransportJobTaskAction requestSupplier, Supplier responseSupplier, String nodeExecutor, AutodetectProcessManager processManager) { - super(settings, actionName, threadPool, clusterService, transportService, 
actionFilters, + super(settings, actionName, clusterService, transportService, actionFilters, requestSupplier, responseSupplier, nodeExecutor); this.processManager = processManager; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java index cc2f70eadeae5..40cec95fae211 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java @@ -15,14 +15,13 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.KillProcessAction; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.notifications.Auditor; @@ -34,10 +33,10 @@ public class TransportKillProcessAction extends TransportJobTaskAction) MlInfoAction.Request::new); this.clusterService = clusterService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java index 1fbbb7a368152..926395d65132c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java @@ -29,9 +29,9 @@ public class TransportPersistJobAction extends TransportJobTaskAction { @Inject - public TransportPersistJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportPersistJobAction(Settings settings, TransportService transportService, ClusterService clusterService, ActionFilters actionFilters, AutodetectProcessManager processManager) { - super(settings, PersistJobAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, PersistJobAction.NAME, clusterService, transportService, actionFilters, PersistJobAction.Request::new, PersistJobAction.Response::new, ThreadPool.Names.SAME, processManager); // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java index d92b4a21564c4..49cb7dc45c954 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; @@ -44,9 +43,9 @@ public class TransportPostCalendarEventsAction extends HandledTransportAction { + private final ThreadPool threadPool; private final Client client; private final ClusterService clusterService; @Inject public TransportPreviewDatafeedAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, Client client, ClusterService clusterService) { - super(settings, PreviewDatafeedAction.NAME, threadPool, transportService, actionFilters, + super(settings, PreviewDatafeedAction.NAME, transportService, actionFilters, (Supplier) PreviewDatafeedAction.Request::new); + this.threadPool = threadPool; this.client = client; this.clusterService = clusterService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java index c135ab8322b05..82caa9a35a6c5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java @@ -20,10 +20,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -39,9 +38,9 @@ public class TransportPutCalendarAction extends HandledTransportAction) PutCalendarAction.Request::new); this.client = client; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index a8cd2cc8134a5..cb35daef8668c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -19,13 +19,12 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; -import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.job.JobManager; import java.io.IOException; import java.util.Collections; @@ -40,10 +39,9 @@ public class TransportPutFilterAction extends HandledTransportAction) PutFilterAction.Request::new); this.client = 
client; this.jobManager = jobManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java index e7455053d525d..cf7350a870e97 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java @@ -48,14 +48,16 @@ public class TransportStopDatafeedAction extends TransportTasksAction { + private final ThreadPool threadPool; private final PersistentTasksService persistentTasksService; @Inject public TransportStopDatafeedAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, ClusterService clusterService, PersistentTasksService persistentTasksService) { - super(settings, StopDatafeedAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, StopDatafeedAction.NAME, clusterService, transportService, actionFilters, StopDatafeedAction.Request::new, StopDatafeedAction.Response::new, MachineLearning.UTILITY_THREAD_POOL_NAME); + this.threadPool = threadPool; this.persistentTasksService = persistentTasksService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java index 8a163d5dd0cd6..fd19c7483bc05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; @@ -26,9 +25,9 @@ public class TransportUpdateCalendarJobAction extends HandledTransportAction { @Inject - public TransportUpdateProcessAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ClusterService clusterService, ActionFilters actionFilters, - AutodetectProcessManager processManager) { - super(settings, UpdateProcessAction.NAME, threadPool, clusterService, transportService, actionFilters, + public TransportUpdateProcessAction(Settings settings, TransportService transportService, ClusterService clusterService, + ActionFilters actionFilters, AutodetectProcessManager processManager) { + super(settings, UpdateProcessAction.NAME, clusterService, transportService, actionFilters, UpdateProcessAction.Request::new, UpdateProcessAction.Response::new, ThreadPool.Names.SAME, processManager); // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java index 4ae159f794895..284ae505a01e4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java @@ -5,24 +5,22 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; +import java.util.function.Supplier; + public class TransportValidateDetectorAction extends HandledTransportAction { @Inject - public TransportValidateDetectorAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters) { - super(settings, ValidateDetectorAction.NAME, threadPool, transportService, actionFilters, + public TransportValidateDetectorAction(Settings settings, TransportService transportService, ActionFilters actionFilters) { + super(settings, ValidateDetectorAction.NAME, transportService, actionFilters, (Supplier) ValidateDetectorAction.Request::new); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java index 990c673a8c1ed..dc2a8155c4d94 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java @@ -5,24 +5,22 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; +import java.util.function.Supplier; + public class TransportValidateJobConfigAction extends HandledTransportAction { @Inject - public TransportValidateJobConfigAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters) { - super(settings, ValidateJobConfigAction.NAME, threadPool, transportService, actionFilters, + public TransportValidateJobConfigAction(Settings settings, TransportService transportService, ActionFilters actionFilters) { + super(settings, ValidateJobConfigAction.NAME, transportService, actionFilters, (Supplier< ValidateJobConfigAction.Request>) ValidateJobConfigAction.Request::new); } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java index b30d8b357c196..3ec30552a00b5 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java @@ -34,6 +34,7 @@ public class TransportMonitoringBulkAction 
extends HandledTransportAction { + private final ThreadPool threadPool; private final ClusterService clusterService; private final Exporters exportService; private final MonitoringService monitoringService; @@ -42,7 +43,8 @@ public class TransportMonitoringBulkAction extends HandledTransportAction) GetRollupCapsAction.Request::new); this.clusterService = clusterService; } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java index b0adf6f12b486..a72dbfbe6b94f 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java @@ -38,9 +38,9 @@ public class TransportGetRollupJobAction extends TransportTasksAction { @Inject - public TransportGetRollupJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportGetRollupJobAction(Settings settings, TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(settings, GetRollupJobsAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, GetRollupJobsAction.NAME, clusterService, transportService, actionFilters, GetRollupJobsAction.Request::new, GetRollupJobsAction.Response::new, ThreadPool.Names.SAME); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 48fb19bbe1552..a9f3dc5a1b786 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -84,10 +84,10 @@ public class TransportRollupSearchAction extends TransportAction { private final NativeRoleMappingStore roleMappingStore; @Inject - public TransportGetRoleMappingsAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + public TransportGetRoleMappingsAction(Settings settings, ActionFilters actionFilters, TransportService transportService, NativeRoleMappingStore nativeRoleMappingStore) { - super(settings, GetRoleMappingsAction.NAME, threadPool, transportService, actionFilters, + super(settings, GetRoleMappingsAction.NAME, transportService, actionFilters, GetRoleMappingsRequest::new); this.roleMappingStore = nativeRoleMappingStore; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index cd65017940554..8e72a7d76e6ef 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; 
import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; @@ -23,9 +22,9 @@ public class TransportPutRoleMappingAction private final NativeRoleMappingStore roleMappingStore; @Inject - public TransportPutRoleMappingAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + public TransportPutRoleMappingAction(Settings settings, ActionFilters actionFilters, TransportService transportService, NativeRoleMappingStore roleMappingStore) { - super(settings, PutRoleMappingAction.NAME, threadPool, transportService, actionFilters, + super(settings, PutRoleMappingAction.NAME, transportService, actionFilters, PutRoleMappingRequest::new); this.roleMappingStore = roleMappingStore; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java index 2d43717b65ff3..3d0965b96aa9c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java @@ -31,6 +31,7 @@ */ public final class TransportSamlAuthenticateAction extends HandledTransportAction { + private final ThreadPool threadPool; private final AuthenticationService authenticationService; private final TokenService tokenService; @@ -38,7 +39,8 @@ public final class TransportSamlAuthenticateAction extends HandledTransportActio public TransportSamlAuthenticateAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, AuthenticationService authenticationService, TokenService tokenService) { - super(settings, SamlAuthenticateAction.NAME, threadPool, transportService, actionFilters, SamlAuthenticateRequest::new); + super(settings, SamlAuthenticateAction.NAME, transportService, actionFilters, SamlAuthenticateRequest::new); + this.threadPool = threadPool; this.authenticationService = authenticationService; this.tokenService = tokenService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java index f4a3d35376d21..778364bf5c1a5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionAction; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionRequest; @@ -47,9 +46,9 @@ public final class TransportSamlInvalidateSessionAction private final Realms realms; @Inject - public TransportSamlInvalidateSessionAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public 
TransportSamlInvalidateSessionAction(Settings settings, TransportService transportService, ActionFilters actionFilters, TokenService tokenService, Realms realms) { - super(settings, SamlInvalidateSessionAction.NAME, threadPool, transportService, actionFilters, SamlInvalidateSessionRequest::new); + super(settings, SamlInvalidateSessionAction.NAME, transportService, actionFilters, SamlInvalidateSessionRequest::new); this.tokenService = tokenService; this.realms = realms; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java index 11bc64e7f1839..43873c5bcadf4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutAction; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutRequest; @@ -41,9 +40,9 @@ public final class TransportSamlLogoutAction private final TokenService tokenService; @Inject - public TransportSamlLogoutAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportSamlLogoutAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Realms realms, TokenService tokenService) { - super(settings, SamlLogoutAction.NAME, threadPool, transportService, actionFilters, SamlLogoutRequest::new); + super(settings, SamlLogoutAction.NAME, transportService, actionFilters, SamlLogoutRequest::new); this.realms = realms; this.tokenService = tokenService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java index 9d1619cdd5579..58eb5ccc59ce1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java @@ -5,15 +5,12 @@ */ package org.elasticsearch.xpack.security.action.saml; -import java.util.List; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationAction; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationRequest; @@ -24,6 +21,8 @@ import org.elasticsearch.xpack.security.authc.saml.SamlUtils; import org.opensaml.saml.saml2.core.AuthnRequest; +import java.util.List; + import static 
org.elasticsearch.xpack.security.authc.saml.SamlRealm.findSamlRealms; /** @@ -35,9 +34,9 @@ public final class TransportSamlPrepareAuthenticationAction private final Realms realms; @Inject - public TransportSamlPrepareAuthenticationAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportSamlPrepareAuthenticationAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Realms realms) { - super(settings, SamlPrepareAuthenticationAction.NAME, threadPool, transportService, actionFilters, + super(settings, SamlPrepareAuthenticationAction.NAME, transportService, actionFilters, SamlPrepareAuthenticationRequest::new); this.realms = realms; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java index 9959f0c676e85..60d3086763a09 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java @@ -32,13 +32,15 @@ public final class TransportCreateTokenAction extends HandledTransportAction { private static final String DEFAULT_SCOPE = "full"; + private final ThreadPool threadPool; private final TokenService tokenService; private final AuthenticationService authenticationService; @Inject public TransportCreateTokenAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, TokenService tokenService, AuthenticationService authenticationService) { - super(settings, CreateTokenAction.NAME, threadPool, transportService, actionFilters, CreateTokenRequest::new); + super(settings, CreateTokenAction.NAME, transportService, actionFilters, CreateTokenRequest::new); + this.threadPool = threadPool; this.tokenService = tokenService; this.authenticationService = authenticationService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java index 1c70adfb8f995..7b280087d617b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest; @@ -25,9 +24,9 @@ public final class TransportInvalidateTokenAction extends HandledTransportAction private final TokenService tokenService; @Inject - public TransportInvalidateTokenAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportInvalidateTokenAction(Settings settings, TransportService transportService, ActionFilters actionFilters, TokenService tokenService) { - super(settings, InvalidateTokenAction.NAME, 
threadPool, transportService, actionFilters, + super(settings, InvalidateTokenAction.NAME, transportService, actionFilters, InvalidateTokenRequest::new); this.tokenService = tokenService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java index 761fa5e8349e1..601ee944dd82b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest; import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse; @@ -24,9 +23,9 @@ public class TransportRefreshTokenAction extends HandledTransportAction { private final SecurityContext securityContext; @Inject - public TransportAuthenticateAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportAuthenticateAction(Settings settings, TransportService transportService, ActionFilters actionFilters, SecurityContext securityContext) { - super(settings, AuthenticateAction.NAME, threadPool, transportService, actionFilters, + super(settings, AuthenticateAction.NAME, transportService, actionFilters, (Supplier) AuthenticateRequest::new); this.securityContext = securityContext; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 8f0256b7e7773..78b4ae0193655 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; @@ -25,9 +24,9 @@ public class TransportChangePasswordAction extends HandledTransportAction { private final NativeUsersStore usersStore; @Inject - public TransportDeleteUserAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + public TransportDeleteUserAction(Settings settings, ActionFilters actionFilters, NativeUsersStore usersStore, TransportService transportService) { - super(settings, DeleteUserAction.NAME, threadPool, transportService, actionFilters, + super(settings, DeleteUserAction.NAME, transportService, actionFilters, (Supplier) DeleteUserRequest::new); this.usersStore = usersStore; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index f40db20a339ef..49e8c9d96aba9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.GetUsersAction; import org.elasticsearch.xpack.core.security.action.user.GetUsersRequest; @@ -36,9 +35,9 @@ public class TransportGetUsersAction extends HandledTransportAction { + private final ThreadPool threadPool; private final AuthorizationService authorizationService; @Inject public TransportHasPrivilegesAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, AuthorizationService authorizationService) { - super(settings, HasPrivilegesAction.NAME, threadPool, transportService, actionFilters, HasPrivilegesRequest::new); + super(settings, HasPrivilegesAction.NAME, transportService, actionFilters, HasPrivilegesRequest::new); + this.threadPool = threadPool; this.authorizationService = authorizationService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java index f2b32e68a79ba..85411b0e75f89 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; @@ -28,9 +27,9 @@ public class TransportPutUserAction extends HandledTransportAction { + private final ThreadPool threadPool; private final NativeUsersStore usersStore; @Inject public TransportSetEnabledAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, NativeUsersStore usersStore) { - super(settings, SetEnabledAction.NAME, threadPool, transportService, actionFilters, SetEnabledRequest::new); + super(settings, SetEnabledAction.NAME, transportService, actionFilters, SetEnabledRequest::new); + this.threadPool = threadPool; this.usersStore = usersStore; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java index e7eb1fcc8d798..fba4afe47911e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse; @@ -44,7 +43,7 @@ public void testReservedRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); @@ -75,7 +74,7 @@ public void testValidRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); @@ -119,7 +118,7 @@ public void testException() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java index 9c19bf2097d22..27ae467c786db 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.GetRolesRequest; import org.elasticsearch.xpack.core.security.action.role.GetRolesResponse; @@ -44,7 +43,7 @@ public void testReservedRoles() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, 
x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); final int size = randomIntBetween(1, ReservedRolesStore.names().size()); @@ -90,7 +89,7 @@ public void testStoreRoles() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); @@ -142,7 +141,7 @@ public void testGetAllOrMix() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); final List expectedNames = new ArrayList<>(); @@ -206,7 +205,7 @@ public void testException() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java index 94a69cc044253..8392f92e0c31f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; @@ -45,8 +44,7 @@ public void testReservedRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new 
TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); @@ -76,8 +74,7 @@ public void testValidRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); final boolean created = randomBoolean(); PutRoleRequest request = new PutRoleRequest(); @@ -120,8 +117,7 @@ public void testException() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java index cc67a4facb0ed..ea6713bb85fe9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java @@ -5,19 +5,12 @@ */ package org.elasticsearch.xpack.security.action.rolemapping; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse; @@ -26,6 +19,12 @@ import org.hamcrest.Matchers; import org.junit.Before; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -46,8 +45,7 @@ public void setupMocks() { store = mock(NativeRoleMappingStore.class); 
TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - action = new TransportGetRoleMappingsAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - transportService, store); + action = new TransportGetRoleMappingsAction(Settings.EMPTY, mock(ActionFilters.class), transportService, store); namesRef = new AtomicReference<>(null); result = Collections.emptyList(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 3ba584440bb42..b105c0d5d0ea5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -5,25 +5,24 @@ */ package org.elasticsearch.xpack.security.action.rolemapping; -import java.util.Arrays; -import java.util.Collections; -import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.junit.Before; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; + import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -42,8 +41,7 @@ public void setupMocks() { store = mock(NativeRoleMappingStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - action = new TransportPutRoleMappingAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), transportService, store); + action = new TransportPutRoleMappingAction(Settings.EMPTY, mock(ActionFilters.class), transportService, store); requestRef = new AtomicReference<>(null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index b46d307866284..b9232903f52ff 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -168,8 +168,7 @@ void doExecute(Action action, Request request, ActionListener null, null, Collections.emptySet()); final Realms realms = mock(Realms.class); - action = new TransportSamlInvalidateSessionAction(settings, threadPool, transportService, - mock(ActionFilters.class),tokenService, realms); + action = new TransportSamlInvalidateSessionAction(settings, transportService, mock(ActionFilters.class),tokenService, realms); final Path metadata = PathUtils.get(SamlRealm.class.getResource("idp1.xml").toURI()); final Environment env = TestEnvironment.newEnvironment(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 79d4978cfd248..6d177d89021ab 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -183,7 +183,7 @@ public void setup() throws Exception { final TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); final Realms realms = mock(Realms.class); - action = new TransportSamlLogoutAction(settings, threadPool, transportService, mock(ActionFilters.class), realms, tokenService); + action = new TransportSamlLogoutAction(settings, transportService, mock(ActionFilters.class), realms, tokenService); final Path metadata = PathUtils.get(SamlRealm.class.getResource("idp1.xml").toURI()); final Environment env = TestEnvironment.newEnvironment(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java index 20af681f477ec..66e2192eee5dd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest; @@ -39,7 +38,7 @@ public void testInternalUser() { when(securityContext.getUser()).thenReturn(randomFrom(SystemUser.INSTANCE, XPackUser.INSTANCE)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, transportService, mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new 
AtomicReference<>(); @@ -65,7 +64,7 @@ public void testNullUser() { SecurityContext securityContext = mock(SecurityContext.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, transportService, mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); @@ -93,7 +92,7 @@ public void testValidUser() { when(securityContext.getUser()).thenReturn(user); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, transportService, mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java index bc1c42f66a55b..4aa68c24c8d71 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordResponse; @@ -51,7 +50,7 @@ public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(settings, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(settings, transportService, mock(ActionFilters.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); @@ -82,7 +81,7 @@ public void testInternalUsers() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService, mock(ActionFilters.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); @@ -124,7 +123,7 @@ public void testValidUser() { 
}).when(usersStore).changePassword(eq(request), any(ActionListener.class)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService, mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); @@ -165,7 +164,7 @@ public Void answer(InvocationOnMock invocation) { }).when(usersStore).changePassword(eq(request), any(ActionListener.class)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService, mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java index dab63fcc31336..9c61d0cde520c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; import org.elasticsearch.xpack.core.security.action.user.DeleteUserResponse; @@ -48,8 +47,7 @@ public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(settings, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportDeleteUserAction action = new TransportDeleteUserAction(settings, mock(ActionFilters.class), usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(new AnonymousUser(settings).principal()); @@ -77,8 +75,8 @@ public void testInternalUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), + usersStore, transportService); DeleteUserRequest request = new 
DeleteUserRequest(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -107,8 +105,8 @@ public void testReservedUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), + usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(reserved.principal()); @@ -137,7 +135,7 @@ public void testValidUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final boolean found = randomBoolean(); @@ -178,7 +176,7 @@ public void testException() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final DeleteUserRequest request = new DeleteUserRequest(user.principal()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index fdb37b2f5bd8c..070fe4e64317b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -91,7 +91,7 @@ public void testAnonymousUser() { new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, securityIndex, threadPool); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, reservedRealm); GetUsersRequest request = new GetUsersRequest(); @@ -126,7 +126,7 @@ public void testInternalUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, 
mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); @@ -169,7 +169,7 @@ public void testReservedUsersOnly() { final List names = reservedUsers.stream().map(User::principal).collect(Collectors.toList()); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, reservedRealm); logger.error("names {}", names); @@ -209,7 +209,7 @@ public void testGetAllUsers() { securityIndex, threadPool); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, reservedRealm); GetUsersRequest request = new GetUsersRequest(); @@ -256,7 +256,7 @@ public void testGetStoreOnlyUsers() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); @@ -304,7 +304,7 @@ public void testException() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java index 140508b51a1b0..5f9a3f5243160 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java @@ -58,8 +58,7 @@ public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(settings, 
mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportPutUserAction action = new TransportPutUserAction(settings, mock(ActionFilters.class), usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(anonymousUser.principal()); @@ -88,8 +87,7 @@ public void testSystemUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); @@ -129,7 +127,7 @@ public void testReservedUser() { final User reserved = randomFrom(userFuture.actionGet().toArray(new User[0])); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); PutUserRequest request = new PutUserRequest(); @@ -159,7 +157,7 @@ public void testValidUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final boolean isCreate = randomBoolean(); @@ -205,7 +203,7 @@ public void testException() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final PutUserRequest request = new PutUserRequest(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index 3cbb2f8a1bc93..b6a53ae95f098 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.session.Configuration; @@ -25,9 +24,9 @@ public class TransportSqlClearCursorAction extends HandledTransportAction) SqlClearCursorRequest::new); this.planExecutor = planExecutor; this.sqlLicenseChecker = sqlLicenseChecker; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 044683a29ad67..bc96b3de97307 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.proto.ColumnInfo; @@ -34,9 +33,9 @@ public class TransportSqlQueryAction extends HandledTransportAction) SqlQueryRequest::new); this.planExecutor = planExecutor; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java index 0df3b2ad1bb50..61772ce6bb1e9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.session.Configuration; @@ -24,11 +23,9 @@ public class TransportSqlTranslateAction extends HandledTransportAction) SqlTranslateRequest::new); this.planExecutor = planExecutor; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java index cdb1479eec5e3..3d493299d7036 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java @@ -24,9 +24,9 @@ public abstract class WatcherTransportAction request) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); this.licenseState = licenseState; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java index 6a31b0e5cb054..8c056d0dcb8be 100644 --- 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionSnapshot; @@ -54,10 +53,10 @@ public class TransportAckWatchAction extends WatcherTransportAction) DeleteWatchRequest::new); this.client = client; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java index 6ccc7518d8b4c..0cc9af6aafa7b 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java @@ -57,6 +57,7 @@ */ public class TransportExecuteWatchAction extends WatcherTransportAction { + private final ThreadPool threadPool; private final ExecutionService executionService; private final Clock clock; private final TriggerService triggerService; @@ -68,7 +69,8 @@ public TransportExecuteWatchAction(Settings settings, TransportService transport ActionFilters actionFilters, ExecutionService executionService, Clock clock, XPackLicenseState licenseState, WatchParser watchParser, Client client, TriggerService triggerService) { - super(settings, ExecuteWatchAction.NAME, transportService, threadPool, actionFilters, licenseState, ExecuteWatchRequest::new); + super(settings, ExecuteWatchAction.NAME, transportService, actionFilters, licenseState, ExecuteWatchRequest::new); + this.threadPool = threadPool; this.executionService = executionService; this.clock = clock; this.triggerService = triggerService; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java index 6891e3e6272b2..60118f2afdab9 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchAction; @@ -43,9 +42,9 @@ public class TransportGetWatchAction extends WatcherTransportAction { + private final ThreadPool threadPool; private final Clock clock; private final WatchParser parser; private final Client client; @@ -64,7 +65,8 @@ public class TransportPutWatchAction extends 
WatcherTransportAction Date: Thu, 21 Jun 2018 11:32:11 -0700 Subject: [PATCH 13/34] [DOCS] Add code snippet testing in more ML APIs (#31339) --- x-pack/docs/build.gradle | 22 ++++++++++++++----- x-pack/docs/en/rest-api/ml/forecast.asciidoc | 8 +++---- .../en/rest-api/ml/preview-datafeed.asciidoc | 22 ++++++++++--------- 3 files changed, 32 insertions(+), 20 deletions(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 6c0a4bfcac647..912c9965b4d9a 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -47,7 +47,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/watcher/trigger/schedule/yearly.asciidoc', 'en/watcher/troubleshooting.asciidoc', 'en/rest-api/ml/delete-snapshot.asciidoc', - 'en/rest-api/ml/forecast.asciidoc', 'en/rest-api/ml/get-bucket.asciidoc', 'en/rest-api/ml/get-job-stats.asciidoc', 'en/rest-api/ml/get-overall-buckets.asciidoc', @@ -56,7 +55,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/ml/get-influencer.asciidoc', 'en/rest-api/ml/get-snapshot.asciidoc', 'en/rest-api/ml/post-data.asciidoc', - 'en/rest-api/ml/preview-datafeed.asciidoc', 'en/rest-api/ml/revert-snapshot.asciidoc', 'en/rest-api/ml/update-snapshot.asciidoc', 'en/rest-api/watcher/stats.asciidoc', @@ -296,7 +294,9 @@ setups['farequote_index'] = ''' responsetime: type: float airline: - type: keyword + type: keyword + doc_count: + type: integer ''' setups['farequote_data'] = setups['farequote_index'] + ''' - do: @@ -306,11 +306,11 @@ setups['farequote_data'] = setups['farequote_index'] + ''' refresh: true body: | {"index": {"_id":"1"}} - {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000"} + {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000", "doc_count": 5} {"index": {"_id":"2"}} - {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000"} + {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000", "doc_count": 23} {"index": {"_id":"3"}} - {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000"} + {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000", "doc_count": 42} ''' setups['farequote_job'] = setups['farequote_data'] + ''' - do: @@ -332,6 +332,16 @@ setups['farequote_job'] = setups['farequote_data'] + ''' } } ''' +setups['farequote_datafeed'] = setups['farequote_job'] + ''' + - do: + xpack.ml.put_datafeed: + datafeed_id: "datafeed-farequote" + body: > + { + "job_id":"farequote", + "indexes":"farequote" + } +''' setups['server_metrics_index'] = ''' - do: indices.create: diff --git a/x-pack/docs/en/rest-api/ml/forecast.asciidoc b/x-pack/docs/en/rest-api/ml/forecast.asciidoc index 169debef7b6cb..99647ecae1b25 100644 --- a/x-pack/docs/en/rest-api/ml/forecast.asciidoc +++ b/x-pack/docs/en/rest-api/ml/forecast.asciidoc @@ -5,7 +5,7 @@ Forecast Jobs ++++ -Predict the future behavior of a time series by using historical behavior. +Predicts the future behavior of a time series by using its historical behavior. ==== Request @@ -62,7 +62,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_forecast } -------------------------------------------------- // CONSOLE -// TEST[skip:todo] +// TEST[skip:requires delay] When the forecast is created, you receive the following results: [source,js] @@ -72,7 +72,7 @@ When the forecast is created, you receive the following results: "forecast_id": "wkCWa2IB2lF8nSE_TzZo" } ---- +// NOTCONSOLE You can subsequently see the forecast in the *Single Metric Viewer* in {kib}. 
-//and in the results that you retrieve by using {ml} APIs such as the -//<> and <>. + diff --git a/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc b/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc index e6b51f8ef069f..637b506cb9af7 100644 --- a/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc +++ b/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc @@ -31,7 +31,6 @@ structure of the data that will be passed to the anomaly detection engine. You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. -//<>. ==== Security Integration @@ -54,27 +53,30 @@ The following example obtains a preview of the `datafeed-farequote` {dfeed}: GET _xpack/ml/datafeeds/datafeed-farequote/_preview -------------------------------------------------- // CONSOLE -// TEST[skip:todo] +// TEST[setup:farequote_datafeed] The data that is returned for this example is as follows: [source,js] ---- [ { - "@timestamp": 1454803200000, - "airline": "AAL", - "responsetime": 132.20460510253906 - }, - { - "@timestamp": 1454803200000, + "time": 1454803200000, "airline": "JZA", + "doc_count": 5, "responsetime": 990.4628295898438 }, { - "@timestamp": 1454803200000, + "time": 1454803200000, "airline": "JBU", + "doc_count": 23, "responsetime": 877.5927124023438 }, - ... + { + "time": 1454803200000, + "airline": "KLM", + "doc_count": 42, + "responsetime": 1355.481201171875 + } ] ---- +// TESTRESPONSE From 99f503e3bea5c289907ff16b438875ecf2a8f2a2 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Fri, 22 Jun 2018 00:16:03 +0300 Subject: [PATCH 14/34] [DOCS] Fix REST tests in SQL docs Fixed a search & replace gone awry Tweaked the docs a bit --- x-pack/docs/en/sql/index.asciidoc | 6 +++--- x-pack/docs/en/sql/language/syntax/select.asciidoc | 12 ++++++------ x-pack/docs/en/sql/overview.asciidoc | 2 ++ 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/x-pack/docs/en/sql/index.asciidoc b/x-pack/docs/en/sql/index.asciidoc index 4c2130208927a..f96b83db08ad6 100644 --- a/x-pack/docs/en/sql/index.asciidoc +++ b/x-pack/docs/en/sql/index.asciidoc @@ -35,11 +35,11 @@ indices and return results in tabular format. SQL and print tabular results. <>:: A JDBC driver for {es}. -<>:: - List of functions and operators supported. <>:: Overview of the {es-sql} language, such as supported data types, commands and syntax. +<>:: + List of functions and operators supported. -- include::overview.asciidoc[] @@ -47,8 +47,8 @@ include::getting-started.asciidoc[] include::concepts.asciidoc[] include::security.asciidoc[] include::endpoints/index.asciidoc[] -include::functions/index.asciidoc[] include::language/index.asciidoc[] +include::functions/index.asciidoc[] include::appendix/index.asciidoc[] :jdbc-tests!: diff --git a/x-pack/docs/en/sql/language/syntax/select.asciidoc b/x-pack/docs/en/sql/language/syntax/select.asciidoc index f39cbc0c2f8ca..35c2bf0737db9 100644 --- a/x-pack/docs/en/sql/language/syntax/select.asciidoc +++ b/x-pack/docs/en/sql/language/syntax/select.asciidoc @@ -191,14 +191,14 @@ which results in something like: [source,text] -------------------------------------------------- author | name | page_count | release_date ------------------`--------------------`---------------`------------------------ +-----------------+--------------------+---------------+------------------------ Peter F. 
Hamilton|Pandora's Star |768 |2004-03-02T00:00:00.000Z Vernor Vinge |A Fire Upon the Deep|613 |1992-06-01T00:00:00.000Z Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z Alastair Reynolds|Revelation Space |585 |2000-03-15T00:00:00.000Z James S.A. Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z -------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\`/\\`/] +// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/] // TESTRESPONSE[_cat] [[sql-syntax-order-by-score]] @@ -228,13 +228,13 @@ Which results in something like: [source,text] -------------------------------------------------- SCORE() | author | name | page_count | release_date ----------------`---------------`-------------------`---------------`------------------------ +---------------+---------------+-------------------+---------------+------------------------ 2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z 1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z 1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z 1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z -------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\`/\\`/ s/\(/\\\(/ s/\)/\\\)/] +// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] // TESTRESPONSE[_cat] Note that you can return `SCORE()` by adding it to the where clause. This @@ -253,13 +253,13 @@ POST /_xpack/sql?format=txt [source,text] -------------------------------------------------- SCORE() | author | name | page_count | release_date ----------------`---------------`-------------------`---------------`------------------------ +---------------+---------------+-------------------+---------------+------------------------ 2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z 1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z 1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z 1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z -------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\`/\\`/ s/\(/\\\(/ s/\)/\\\)/] +// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] // TESTRESPONSE[_cat] NOTE: diff --git a/x-pack/docs/en/sql/overview.asciidoc b/x-pack/docs/en/sql/overview.asciidoc index 34d0dfb538352..36eff69f6263d 100644 --- a/x-pack/docs/en/sql/overview.asciidoc +++ b/x-pack/docs/en/sql/overview.asciidoc @@ -4,6 +4,7 @@ {es-sql} aims to provide a powerful yet lightweight SQL interface to {es}. [[sql-introduction]] +[float] === Introduction {es-sql} is an X-Pack component that allows SQL-like queries to be executed in real-time against {es}. @@ -12,6 +13,7 @@ _natively_ inside {es}. One can think of {es-sql} as a _translator_, one that understands both SQL and {es} and makes it easy to read and process data in real-time, at scale by leveraging {es} capabilities. [[sql-why]] +[float] === Why {es-sql} ? Native integration:: From 3b7225e9d1dead1718132a51fdde5d1cbe4a42b9 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 21 Jun 2018 16:53:20 -0700 Subject: [PATCH 15/34] In NumberFieldType equals and hashCode, make sure that NumberType is taken into account. 
(#31514) --- .../index/mapper/NumberFieldMapper.java | 18 ++++++++++++++++-- .../index/mapper/NumberFieldTypeTests.java | 15 +++++++++++++-- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 9c327c5294efe..b4531f9c489e3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -846,7 +846,7 @@ private static double objectToDouble(Object value) { public static final class NumberFieldType extends SimpleMappedFieldType { - NumberType type; + private final NumberType type; public NumberFieldType(NumberType type) { super(); @@ -856,7 +856,7 @@ public NumberFieldType(NumberType type) { setOmitNorms(true); } - NumberFieldType(NumberFieldType other) { + private NumberFieldType(NumberFieldType other) { super(other); this.type = other.type; } @@ -936,6 +936,20 @@ public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) { return new DocValueFormat.Decimal(format); } } + + @Override + public boolean equals(Object o) { + if (super.equals(o) == false) { + return false; + } + NumberFieldType that = (NumberFieldType) o; + return type == that.type; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), type); + } } private Explicit ignoreMalformed; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index 3ffe48fe70af6..4b2967553e57b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.lucene.document.Document; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.HalfFloatPoint; @@ -37,10 +36,11 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.TestUtil; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; import org.hamcrest.Matchers; import org.junit.Before; @@ -68,6 +68,17 @@ protected MappedFieldType createDefaultFieldType() { return new NumberFieldMapper.NumberFieldType(type); } + public void testEqualsWithDifferentNumberTypes() { + NumberType type = randomFrom(NumberType.values()); + NumberFieldType fieldType = new NumberFieldType(type); + + NumberType otherType = randomValueOtherThan(type, + () -> randomFrom(NumberType.values())); + NumberFieldType otherFieldType = new NumberFieldType(otherType); + + assertNotEquals(fieldType, otherFieldType); + } + public void testIsFieldWithinQuery() throws IOException { MappedFieldType ft = createDefaultFieldType(); // current impl ignores args and should always return INTERSECTS From 724438a0b084a9a9d20790ce141bf8ec18265950 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Fri, 22 Jun 2018 10:15:38 +1000 Subject: [PATCH 16/34] 
[Security] Check auth scheme case insensitively (#31490) According to RFC 7617, the Basic authentication scheme name should not be case sensitive. Case-insensitive comparisons are also applicable to bearer tokens, where the Bearer authentication scheme is used as per RFC 6750 and RFC 7235. Some HTTP clients may send authentication scheme names in different cases, e.g. Basic, basic, BASIC, BEARER, etc., so the lack of a case-insensitive check is an issue when these clients try to authenticate with Elasticsearch. This commit adds case-insensitive checks for Basic and Bearer authentication schemes. Closes #31486 --- .../authc/support/UsernamePasswordToken.java | 13 ++++++++----- .../xpack/security/authc/TokenService.java | 2 +- .../security/authc/TokenServiceTests.java | 18 +++++++++++++++++- .../support/UsernamePasswordTokenTests.java | 7 ++++--- 4 files changed, 30 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java index 4fdf32608dd6a..d8e58c29d237b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.security.authc.support; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; @@ -20,6 +21,8 @@ public class UsernamePasswordToken implements AuthenticationToken { public static final String BASIC_AUTH_PREFIX = "Basic "; public static final String BASIC_AUTH_HEADER = "Authorization"; + // authorization scheme check is case-insensitive + private static final boolean IGNORE_CASE_AUTH_HEADER_MATCH = true; private final String username; private final SecureString password; @@ -79,15 +82,15 @@ public int hashCode() { public static UsernamePasswordToken extractToken(ThreadContext context) { String authStr = context.getHeader(BASIC_AUTH_HEADER); - if (authStr == null) { - return null; - } - return extractToken(authStr); } private static UsernamePasswordToken extractToken(String headerValue) { - if (headerValue.startsWith(BASIC_AUTH_PREFIX) == false) { + if (Strings.isNullOrEmpty(headerValue)) { + return null; + } + if (headerValue.regionMatches(IGNORE_CASE_AUTH_HEADER_MATCH, 0, BASIC_AUTH_PREFIX, 0, + BASIC_AUTH_PREFIX.length()) == false) { // the header does not start with 'Basic ' so we cannot use it, but it may be valid for another realm return null; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 2934fb8062de4..8b6dd8295d399 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -1007,7 +1007,7 @@ private void maybeStartTokenRemover() { */ private String getFromHeader(ThreadContext threadContext) { String header = threadContext.getHeader("Authorization"); - if (Strings.hasLength(header) && header.startsWith("Bearer ") + if (Strings.hasText(header) &&
header.regionMatches(true, 0, "Bearer ", 0, "Bearer ".length()) && header.length() > "Bearer ".length()) { return header.substring("Bearer ".length()); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 28cf4bf95c924..d5e67f3996a7b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -71,6 +71,7 @@ import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; @@ -162,7 +163,7 @@ public void testAttachAndGetToken() throws Exception { mockGetTokenFromId(token); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token)); + requestContext.putHeader("Authorization", randomFrom("Bearer ", "BEARER ", "bearer ") + tokenService.getUserTokenString(token)); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); @@ -183,6 +184,21 @@ public void testAttachAndGetToken() throws Exception { } } + public void testInvalidAuthorizationHeader() throws Exception { + TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); + ThreadContext requestContext = new ThreadContext(Settings.EMPTY); + String token = randomFrom("", " "); + String authScheme = randomFrom("Bearer ", "BEARER ", "bearer ", "Basic "); + requestContext.putHeader("Authorization", authScheme + token); + + try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { + PlainActionFuture future = new PlainActionFuture<>(); + tokenService.getAndValidateToken(requestContext, future); + UserToken serialized = future.get(); + assertThat(serialized, nullValue()); + } + } + public void testRotateKey() throws Exception { TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java index 57c452798844c..86b9635851bb6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java @@ -45,7 +45,8 @@ public void testPutToken() throws Exception { public void testExtractToken() throws Exception { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - String header = "Basic " + Base64.getEncoder().encodeToString("user1:test123".getBytes(StandardCharsets.UTF_8)); + final String header = randomFrom("Basic ", "basic ", "BASIC ") + + 
Base64.getEncoder().encodeToString("user1:test123".getBytes(StandardCharsets.UTF_8)); threadContext.putHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, header); UsernamePasswordToken token = UsernamePasswordToken.extractToken(threadContext); assertThat(token, notNullValue()); @@ -54,7 +55,7 @@ public void testExtractToken() throws Exception { } public void testExtractTokenInvalid() throws Exception { - String[] invalidValues = { "Basic ", "Basic f" }; + final String[] invalidValues = { "Basic ", "Basic f", "basic " }; for (String value : invalidValues) { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); threadContext.putHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, value); @@ -70,7 +71,7 @@ public void testExtractTokenInvalid() throws Exception { public void testHeaderNotMatchingReturnsNull() { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - String header = randomFrom("BasicBroken", "invalid", "Basic"); + final String header = randomFrom("Basic", "BasicBroken", "invalid", " basic "); threadContext.putHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, header); UsernamePasswordToken extracted = UsernamePasswordToken.extractToken(threadContext); assertThat(extracted, nullValue()); From 009ae48cbae967d102b28d8b8e0bf25197c90421 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Fri, 22 Jun 2018 17:47:20 +1000 Subject: [PATCH 17/34] [PkiRealm] Invalidate cache on role mappings change (#31510) PkiRealm caches successful authentications and provides ways to invalidate the cache. But in some scenarios the cache was not being invalidated on role mapping changes. PkiRealm does not register with the role mapper to be notified for cache refresh on role mapping updates. The logic in `TransportClearRealmCacheAction#nodeOperation`, which gets invoked for refreshing caches on realms, treats null or empty realm names in the request as a request to clear the cache on all realms. When no LDAP realm is present it clears the cache for all realms, so this works fine; but when an LDAP realm is configured, the role mapper sends a request with the LDAP realm names, so the cache is cleared only for those realms. This commit resolves the issue by registering PkiRealm with the role mapper for cache refresh. PkiRealm implements CachingRealm, and as it does not extend CachingUsernamePasswordRealm, the interface method `refreshRealmOnChange` has been modified to accept CachingRealm.
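As a rough illustration of the case-insensitive scheme check introduced by #31490 above, the same prefix matching can be sketched with plain JDK `String.regionMatches`; the `AuthSchemes` helper below is hypothetical and only demonstrates the technique, it is not code from the patch:

[source,java]
--------------------------------------------------
// Illustrative sketch only (not part of the patch): matching an Authorization
// header scheme prefix while ignoring case, as #31490 does via regionMatches.
public final class AuthSchemes {

    private static final String BASIC_PREFIX = "Basic ";
    private static final String BEARER_PREFIX = "Bearer ";

    // True if the header starts with the scheme prefix (ignoring case) and carries
    // a non-empty credential after it.
    static boolean hasScheme(String headerValue, String schemePrefix) {
        return headerValue != null
                && headerValue.regionMatches(true, 0, schemePrefix, 0, schemePrefix.length())
                && headerValue.length() > schemePrefix.length();
    }

    public static void main(String[] args) {
        System.out.println(hasScheme("basic dXNlcjE6dGVzdDEyMw==", BASIC_PREFIX)); // true
        System.out.println(hasScheme("BEARER some-token", BEARER_PREFIX));          // true
        System.out.println(hasScheme("Basic", BASIC_PREFIX));                       // false: no credential after the scheme
    }
}
--------------------------------------------------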
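To make the registration flow of #31510 concrete, the sketch below shows the pattern in miniature; the two interfaces loosely mirror the `CachingRealm`/`UserRoleMapper` changes in the diff that follows, while `InMemoryRoleMapper` is purely hypothetical:

[source,java]
--------------------------------------------------
import java.util.ArrayList;
import java.util.List;

// Trimmed-down, hypothetical shapes that only mirror the idea of the interfaces in
// the diff below; they are not the real CachingRealm/UserRoleMapper classes.
interface CachingRealm {
    String name();
    void expireAll();
}

interface UserRoleMapper {
    // A realm registers itself so the mapper can clear its cache on mapping changes.
    void refreshRealmOnChange(CachingRealm realm);
}

class InMemoryRoleMapper implements UserRoleMapper {

    private final List<Runnable> listeners = new ArrayList<>();

    @Override
    public void refreshRealmOnChange(CachingRealm realm) {
        // Mirrors DnRoleMapper#addListener(realm::expireAll) in the diff below.
        listeners.add(realm::expireAll);
    }

    void onRoleMappingsChanged() {
        // Every registered realm, PKI included, drops its cached authentications.
        listeners.forEach(Runnable::run);
    }
}
--------------------------------------------------

In the actual patch the cluster-wide mapper (NativeRoleMappingStore) records `realm.name()` instead and later clears those realms through the clear-realm-cache action.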
--- .../org/elasticsearch/xpack/security/authc/pki/PkiRealm.java | 1 + .../xpack/security/authc/support/CachingRealm.java | 5 +++++ .../xpack/security/authc/support/DnRoleMapper.java | 2 +- .../xpack/security/authc/support/UserRoleMapper.java | 2 +- .../security/authc/support/mapper/CompositeRoleMapper.java | 4 ++-- .../authc/support/mapper/NativeRoleMappingStore.java | 4 ++-- .../xpack/security/authc/pki/PkiRealmTests.java | 3 +++ 7 files changed, 15 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java index e65ac29aafe95..7b9eabfd7066f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java @@ -86,6 +86,7 @@ public PkiRealm(RealmConfig config, ResourceWatcherService watcherService, Nativ this.trustManager = trustManagers(config); this.principalPattern = PkiRealmSettings.USERNAME_PATTERN_SETTING.get(config.settings()); this.roleMapper = roleMapper; + this.roleMapper.refreshRealmOnChange(this); this.cache = CacheBuilder.builder() .setExpireAfterWrite(PkiRealmSettings.CACHE_TTL_SETTING.get(config.settings())) .setMaximumWeight(PkiRealmSettings.CACHE_MAX_USERS_SETTING.get(config.settings())) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java index 4c18ac2df6d6e..6089c8f9a70fb 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java @@ -13,6 +13,11 @@ */ public interface CachingRealm { + /** + * @return The name of this realm. 
+ */ + String name(); + /** * Expires a single user from the cache identified by the String agument * @param username the identifier of the user to be cleared diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java index 6516b02f68d0b..9ff4cd9be824b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java @@ -69,7 +69,7 @@ public DnRoleMapper(RealmConfig config, ResourceWatcherService watcherService) { } @Override - public void refreshRealmOnChange(CachingUsernamePasswordRealm realm) { + public void refreshRealmOnChange(CachingRealm realm) { addListener(realm::expireAll); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java index ffdab15e3b507..8c60e565e681a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java @@ -44,7 +44,7 @@ public interface UserRoleMapper { * the whole cluster depending on whether this role-mapper has node-local data or cluster-wide * data. */ - void refreshRealmOnChange(CachingUsernamePasswordRealm realm); + void refreshRealmOnChange(CachingRealm realm); /** * A representation of a user for whom roles should be mapped. diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java index 0814469cfcea7..956060a65789c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java @@ -16,7 +16,7 @@ import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.security.authc.RealmConfig; -import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; +import org.elasticsearch.xpack.security.authc.support.CachingRealm; import org.elasticsearch.xpack.security.authc.support.DnRoleMapper; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; @@ -48,7 +48,7 @@ public void resolveRoles(UserData user, ActionListener> listener) { } @Override - public void refreshRealmOnChange(CachingUsernamePasswordRealm realm) { + public void refreshRealmOnChange(CachingRealm realm) { this.delegates.forEach(mapper -> mapper.refreshRealmOnChange(realm)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 7df4114863de2..677d13082ca90 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -34,7 +34,7 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel; import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; +import org.elasticsearch.xpack.security.authc.support.CachingRealm; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -369,7 +369,7 @@ public void resolveRoles(UserData user, ActionListener> listener) { * @see ClearRealmCacheAction */ @Override - public void refreshRealmOnChange(CachingUsernamePasswordRealm realm) { + public void refreshRealmOnChange(CachingRealm realm) { realmsToRefresh.add(realm.name()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java index 74f6598f8dd1c..44d5859d12b67 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java @@ -50,6 +50,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class PkiRealmTests extends ESTestCase { @@ -104,6 +105,7 @@ private void assertSuccessfulAuthentication(Set roles) throws Exception UserRoleMapper roleMapper = mock(UserRoleMapper.class); PkiRealm realm = new PkiRealm(new RealmConfig("", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)), roleMapper); + verify(roleMapper).refreshRealmOnChange(realm); Mockito.doAnswer(invocation -> { final UserRoleMapper.UserData userData = (UserRoleMapper.UserData) invocation.getArguments()[0]; final ActionListener> listener = (ActionListener>) invocation.getArguments()[1]; @@ -144,6 +146,7 @@ private void assertSuccessfulAuthentication(Set roles) throws Exception final int numTimes = invalidate ? 2 : 1; verify(roleMapper, times(numTimes)).resolveRoles(any(UserRoleMapper.UserData.class), any(ActionListener.class)); + verifyNoMoreInteractions(roleMapper); } public void testCustomUsernamePattern() throws Exception { From 0352d88621c1d3794197105f7fef97e669c40179 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 22 Jun 2018 09:57:32 +0200 Subject: [PATCH 18/34] Get Mapping API to honour allow_no_indices and ignore_unavailable (#31507) Get Mapping currently throws index not found exception (and returns 404 status code) from the REST layer whenever an index was specified and no indices have been returned. We should not have this logic in the REST layer though as only our index resolver should decide whether we need to throw exceptions or not based on provided indices and corresponding indices options. 
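For context, the leniency described above is what callers opt into through indices options; a minimal sketch, assuming the standard `GetMappingsRequest` and `IndicesOptions.fromOptions` signatures (the `GetMappingLenientExample` class is illustrative only):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
import org.elasticsearch.action.support.IndicesOptions;

// Sketch only: with ignore_unavailable=true and allow_no_indices=true, a missing
// "test_index" should now produce an empty mappings response instead of a 404
// thrown from the REST layer.
class GetMappingLenientExample {

    static GetMappingsRequest lenientGetMappings() {
        return new GetMappingsRequest()
                .indices("test_index")
                .indicesOptions(IndicesOptions.fromOptions(
                        true,    // ignore_unavailable
                        true,    // allow_no_indices
                        true,    // expand wildcards to open indices
                        false)); // do not expand wildcards to closed indices
    }
}
--------------------------------------------------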
Closes #31485 --- .../indices.get_mapping/30_missing_index.yml | 21 +++++++++++++++++++ .../50_wildcard_expansion.yml | 16 +++++++++++++- .../admin/indices/RestGetMappingAction.java | 12 +++-------- 3 files changed, 39 insertions(+), 10 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml index cf4e5b56e786e..4d3abb292f467 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml @@ -13,3 +13,24 @@ indices.get_mapping: index: test_index +--- +"Index missing, ignore_unavailable=true": + - skip: + version: " - 6.99.99" + reason: ignore_unavailable was ignored in previous versions + - do: + indices.get_mapping: + index: test_index + ignore_unavailable: true + + - match: { '': {} } + +--- +"Index missing, ignore_unavailable=true, allow_no_indices=false": + - do: + catch: missing + indices.get_mapping: + index: test_index + ignore_unavailable: true + allow_no_indices: false + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml index a0552f395edb5..d1a95b2690745 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml @@ -94,12 +94,26 @@ setup: --- "Get test-* with wildcard_expansion=none": + - skip: + version: " - 6.99.99" + reason: allow_no_indices (defaults to true) was ignored in previous versions - do: - catch: missing indices.get_mapping: index: test-x* expand_wildcards: none + - match: { '': {} } +--- +"Get test-* with wildcard_expansion=none allow_no_indices=false": + - skip: + version: " - 6.99.99" + reason: allow_no_indices was ignored in previous versions + - do: + catch: missing + indices.get_mapping: + index: test-x* + expand_wildcards: none + allow_no_indices: false --- "Get test-* with wildcard_expansion=open,closed": diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 08f8449b7017f..f5d99bbb46ca1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -89,14 +88,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC @Override public RestResponse buildResponse(final GetMappingsResponse response, final XContentBuilder builder) throws Exception { final ImmutableOpenMap> mappingsByIndex = response.getMappings(); - if (mappingsByIndex.isEmpty() && (indices.length != 0 || types.length != 0)) { - if (indices.length != 0 && types.length == 0) { - builder.close(); - return new 
BytesRestResponse(channel, new IndexNotFoundException(String.join(",", indices))); - } else { - builder.close(); - return new BytesRestResponse(channel, new TypeMissingException("_all", String.join(",", types))); - } + if (mappingsByIndex.isEmpty() && types.length != 0) { + builder.close(); + return new BytesRestResponse(channel, new TypeMissingException("_all", String.join(",", types))); } final Set typeNames = new HashSet<>(); From eade161894011cf19d47d14e4c3c1e15dd945227 Mon Sep 17 00:00:00 2001 From: Sohaib Iftikhar Date: Fri, 22 Jun 2018 09:59:04 +0200 Subject: [PATCH 19/34] REST high-level client: add simulate pipeline API (#31158) relates to #27205 --- .../elasticsearch/client/IngestClient.java | 35 +++++ .../client/RequestConverters.java | 15 ++ .../client/ESRestHighLevelClientTestCase.java | 10 +- .../elasticsearch/client/IngestClientIT.java | 99 +++++++++++++ .../client/RequestConvertersTests.java | 29 ++++ .../IngestClientDocumentationIT.java | 111 ++++++++++++++ .../ingest/simulate_pipeline.asciidoc | 90 ++++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../ingest/SimulateDocumentBaseResult.java | 36 +++++ .../ingest/SimulateDocumentVerboseResult.java | 23 ++- .../ingest/SimulatePipelineRequest.java | 10 +- .../ingest/SimulatePipelineResponse.java | 72 +++++++++ .../ingest/SimulateProcessorResult.java | 74 +++++++++- .../ingest/WriteableIngestDocument.java | 83 ++++++++++- .../elasticsearch/ingest/IngestDocument.java | 13 +- .../SimulateDocumentBaseResultTests.java | 138 ++++++++++++++++++ .../SimulateDocumentSimpleResultTests.java | 60 -------- .../SimulateDocumentVerboseResultTests.java | 113 ++++++++++++++ .../ingest/SimulatePipelineResponseTests.java | 135 ++++++++++++----- .../ingest/SimulateProcessorResultTests.java | 116 +++++++++++++-- .../ingest/WriteableIngestDocumentTests.java | 47 +++++- 21 files changed, 1182 insertions(+), 129 deletions(-) create mode 100644 docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc create mode 100644 server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java delete mode 100644 server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java create mode 100644 server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java index 5c5a82b52f438..340e14653971b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java @@ -24,6 +24,8 @@ import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.WritePipelineResponse; import java.io.IOException; @@ -125,4 +127,37 @@ public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions op restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::deletePipeline, options, WritePipelineResponse::fromXContent, listener, emptySet()); } + + /** + * Simulate a pipeline on a set of documents provided in the request + *

+ * See + * + * Simulate Pipeline API on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public SimulatePipelineResponse simulatePipeline(SimulatePipelineRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::simulatePipeline, options, + SimulatePipelineResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously simulate a pipeline on a set of documents provided in the request + *

+ * See + * + * Simulate Pipeline API on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void simulatePipelineAsync(SimulatePipelineRequest request, + RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::simulatePipeline, options, + SimulatePipelineResponse::fromXContent, listener, emptySet()); + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index cd67bc8e48325..3d5d275732a6c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -71,6 +71,7 @@ import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; @@ -886,6 +887,20 @@ static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws I return request; } + static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException { + EndpointBuilder builder = new EndpointBuilder().addPathPartAsIs("_ingest/pipeline"); + if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) { + builder.addPathPart(simulatePipelineRequest.getId()); + } + builder.addPathPartAsIs("_simulate"); + String endpoint = builder.build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + Params params = new Params(request); + params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose())); + request.setEntity(createEntity(simulatePipelineRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request getAlias(GetAliasesRequest getAliasesRequest) { String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); String[] aliases = getAliasesRequest.aliases() == null ? 
Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java index 4ad39f547584b..69fbab30c336c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java @@ -85,9 +85,7 @@ private HighLevelClient(RestClient restClient) { } } - protected static XContentBuilder buildRandomXContentPipeline() throws IOException { - XContentType xContentType = randomFrom(XContentType.values()); - XContentBuilder pipelineBuilder = XContentBuilder.builder(xContentType.xContent()); + protected static XContentBuilder buildRandomXContentPipeline(XContentBuilder pipelineBuilder) throws IOException { pipelineBuilder.startObject(); { pipelineBuilder.field(Pipeline.DESCRIPTION_KEY, "some random set of processors"); @@ -114,6 +112,12 @@ protected static XContentBuilder buildRandomXContentPipeline() throws IOExceptio return pipelineBuilder; } + protected static XContentBuilder buildRandomXContentPipeline() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + XContentBuilder pipelineBuilder = XContentBuilder.builder(xContentType.xContent()); + return buildRandomXContentPipeline(pipelineBuilder); + } + protected static void createPipeline(String pipelineId) throws IOException { XContentBuilder builder = buildRandomXContentPipeline(); createPipeline(new PutPipelineRequest(pipelineId, BytesReference.bytes(builder), builder.contentType())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java index ecc0d0052d415..6fd6f95059577 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java @@ -23,12 +23,22 @@ import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; +import org.elasticsearch.action.ingest.SimulateDocumentResult; +import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.ingest.PipelineConfiguration; import java.io.IOException; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.core.IsInstanceOf.instanceOf; public class IngestClientIT extends ESRestHighLevelClientTestCase { @@ -80,4 +90,93 @@ public void testDeletePipeline() throws IOException { execute(request, highLevelClient().ingest()::deletePipeline, highLevelClient().ingest()::deletePipelineAsync); assertTrue(response.isAcknowledged()); } + + public void testSimulatePipeline() throws IOException { + testSimulatePipeline(false, false); + } + + public void testSimulatePipelineWithFailure() throws 
IOException { + testSimulatePipeline(false, true); + } + + public void testSimulatePipelineVerbose() throws IOException { + testSimulatePipeline(true, false); + } + + public void testSimulatePipelineVerboseWithFailure() throws IOException { + testSimulatePipeline(true, true); + } + + private void testSimulatePipeline(boolean isVerbose, + boolean isFailure) throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + XContentBuilder builder = XContentBuilder.builder(xContentType.xContent()); + String rankValue = isFailure ? "non-int" : Integer.toString(1234); + builder.startObject(); + { + builder.field("pipeline"); + buildRandomXContentPipeline(builder); + builder.startArray("docs"); + { + builder.startObject() + .field("_index", "index") + .field("_type", "doc") + .field("_id", "doc_" + 1) + .startObject("_source").field("foo", "rab_" + 1).field("rank", rankValue).endObject() + .endObject(); + } + builder.endArray(); + } + builder.endObject(); + + SimulatePipelineRequest request = new SimulatePipelineRequest( + BytesReference.bytes(builder), + builder.contentType() + ); + request.setVerbose(isVerbose); + SimulatePipelineResponse response = + execute(request, highLevelClient().ingest()::simulatePipeline, highLevelClient().ingest()::simulatePipelineAsync); + List results = response.getResults(); + assertEquals(1, results.size()); + if (isVerbose) { + assertThat(results.get(0), instanceOf(SimulateDocumentVerboseResult.class)); + SimulateDocumentVerboseResult verboseResult = (SimulateDocumentVerboseResult) results.get(0); + assertEquals(2, verboseResult.getProcessorResults().size()); + if (isFailure) { + assertNotNull(verboseResult.getProcessorResults().get(1).getFailure()); + assertThat(verboseResult.getProcessorResults().get(1).getFailure().getMessage(), + containsString("unable to convert [non-int] to integer")); + } else { + assertEquals( + verboseResult.getProcessorResults().get(0).getIngestDocument() + .getFieldValue("foo", String.class), + "bar" + ); + assertEquals( + Integer.valueOf(1234), + verboseResult.getProcessorResults().get(1).getIngestDocument() + .getFieldValue("rank", Integer.class) + ); + } + } else { + assertThat(results.get(0), instanceOf(SimulateDocumentBaseResult.class)); + SimulateDocumentBaseResult baseResult = (SimulateDocumentBaseResult)results.get(0); + if (isFailure) { + assertNotNull(baseResult.getFailure()); + assertThat(baseResult.getFailure().getMessage(), + containsString("unable to convert [non-int] to integer")); + } else { + assertNotNull(baseResult.getIngestDocument()); + assertEquals( + baseResult.getIngestDocument().getFieldValue("foo", String.class), + "bar" + ); + assertEquals( + Integer.valueOf(1234), + baseResult.getIngestDocument() + .getFieldValue("rank", Integer.class) + ); + } + } + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index eee37cea561b0..e8bbbf6f5fd0c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -74,6 +74,7 @@ import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; import 
org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; @@ -1534,6 +1535,34 @@ public void testDeletePipeline() { assertEquals(expectedParams, expectedRequest.getParameters()); } + public void testSimulatePipeline() throws IOException { + String pipelineId = randomBoolean() ? "some_pipeline_id" : null; + boolean verbose = randomBoolean(); + String json = "{\"pipeline\":{" + + "\"description\":\"_description\"," + + "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]}," + + "\"docs\":[{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}]}"; + SimulatePipelineRequest request = new SimulatePipelineRequest( + new BytesArray(json.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON + ); + request.setId(pipelineId); + request.setVerbose(verbose); + Map expectedParams = new HashMap<>(); + expectedParams.put("verbose", Boolean.toString(verbose)); + + Request expectedRequest = RequestConverters.simulatePipeline(request); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + endpoint.add("_ingest/pipeline"); + if (pipelineId != null && !pipelineId.isEmpty()) + endpoint.add(pipelineId); + endpoint.add("_simulate"); + assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); + assertEquals(HttpPost.METHOD_NAME, expectedRequest.getMethod()); + assertEquals(expectedParams, expectedRequest.getParameters()); + assertToXContentBody(request, expectedRequest.getEntity()); + } + public void testClusterHealth() { ClusterHealthRequest healthRequest = new ClusterHealthRequest(); Map expectedParams = new HashMap<>(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java index f5bdc9f2f3ee5..c53ec2b5d7cc7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java @@ -25,6 +25,12 @@ import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; +import org.elasticsearch.action.ingest.SimulateDocumentResult; +import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; +import org.elasticsearch.action.ingest.SimulateProcessorResult; import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; @@ -277,4 +283,109 @@ public void onFailure(Exception e) { } } + public void testSimulatePipeline() throws IOException { + RestHighLevelClient client = highLevelClient(); + + { + // tag::simulate-pipeline-request + String source = + "{\"" + + "pipeline\":{" + + "\"description\":\"_description\"," + + "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]" + + "}," + + "\"docs\":[" + + "{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"bar\"}}," + + 
"{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}" + + "]" + + "}"; + SimulatePipelineRequest request = new SimulatePipelineRequest( + new BytesArray(source.getBytes(StandardCharsets.UTF_8)), // <1> + XContentType.JSON // <2> + ); + // end::simulate-pipeline-request + + // tag::simulate-pipeline-request-pipeline-id + request.setId("my-pipeline-id"); // <1> + // end::simulate-pipeline-request-pipeline-id + + // For testing we set this back to null + request.setId(null); + + // tag::simulate-pipeline-request-verbose + request.setVerbose(true); // <1> + // end::simulate-pipeline-request-verbose + + // tag::simulate-pipeline-execute + SimulatePipelineResponse response = client.ingest().simulatePipeline(request, RequestOptions.DEFAULT); // <1> + // end::simulate-pipeline-execute + + // tag::simulate-pipeline-response + for (SimulateDocumentResult result: response.getResults()) { // <1> + if (request.isVerbose()) { + assert result instanceof SimulateDocumentVerboseResult; + SimulateDocumentVerboseResult verboseResult = (SimulateDocumentVerboseResult)result; // <2> + for (SimulateProcessorResult processorResult: verboseResult.getProcessorResults()) { // <3> + processorResult.getIngestDocument(); // <4> + processorResult.getFailure(); // <5> + } + } else { + assert result instanceof SimulateDocumentBaseResult; + SimulateDocumentBaseResult baseResult = (SimulateDocumentBaseResult)result; // <6> + baseResult.getIngestDocument(); // <7> + baseResult.getFailure(); // <8> + } + } + // end::simulate-pipeline-response + assert(response.getResults().size() > 0); + } + } + + public void testSimulatePipelineAsync() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + String source = + "{\"" + + "pipeline\":{" + + "\"description\":\"_description\"," + + "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]" + + "}," + + "\"docs\":[" + + "{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"bar\"}}," + + "{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}" + + "]" + + "}"; + SimulatePipelineRequest request = new SimulatePipelineRequest( + new BytesArray(source.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON + ); + + // tag::simulate-pipeline-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(SimulatePipelineResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::simulate-pipeline-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::simulate-pipeline-execute-async + client.ingest().simulatePipelineAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::simulate-pipeline-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + } diff --git a/docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc b/docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc new file mode 100644 index 0000000000000..9d1bbd06ceb26 --- /dev/null +++ b/docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc @@ -0,0 +1,90 @@ +[[java-rest-high-ingest-simulate-pipeline]] +=== Simulate Pipeline API + +[[java-rest-high-ingest-simulate-pipeline-request]] +==== Simulate Pipeline Request + +A `SimulatePipelineRequest` requires a source and a `XContentType`. 
The source consists +of the request body. See the https://www.elastic.co/guide/en/elasticsearch/reference/master/simulate-pipeline-api.html[docs] +for more details on the request body. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-request] +-------------------------------------------------- +<1> The request body as a `ByteArray`. +<2> The XContentType for the request body supplied above. + +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-request-pipeline-id] +-------------------------------------------------- +<1> You can either specify an existing pipeline to execute against the provided documents, or supply a +pipeline definition in the body of the request. This option sets the id for an existing pipeline. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-request-verbose] +-------------------------------------------------- +<1> To see the intermediate results of each processor in the simulate request, you can add the verbose parameter +to the request. + +[[java-rest-high-ingest-simulate-pipeline-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-execute] +-------------------------------------------------- +<1> Execute the request and get back the response in a `SimulatePipelineResponse` object. + +[[java-rest-high-ingest-simulate-pipeline-async]] +==== Asynchronous Execution + +The asynchronous execution of a simulate pipeline request requires both the `SimulatePipelineRequest` +instance and an `ActionListener` instance to be passed to the asynchronous +method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-execute-async] +-------------------------------------------------- +<1> The `SimulatePipelineRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `SimulatePipelineResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. 
The raised exception is provided as an argument + +[[java-rest-high-ingest-simulate-pipeline-response]] +==== Simulate Pipeline Response + +The returned `SimulatePipelineResponse` allows to retrieve information about the executed + operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-response] +-------------------------------------------------- +<1> Get results for each of the documents provided as instance of `List`. +<2> If the request was in verbose mode cast the response to `SimulateDocumentVerboseResult`. +<3> Check the result after each processor is applied. +<4> Get the ingest document for the result obtained in 3. +<5> Or get the failure for the result obtained in 3. +<6> Get the result as `SimulateDocumentBaseResult` if the result was not verbose. +<7> Get the ingest document for the result obtained in 6. +<8> Or get the failure for the result obtained in 6. diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 727088aa5737f..418eb528f8e00 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -123,10 +123,12 @@ The Java High Level REST Client supports the following Ingest APIs: * <> * <> * <> +* <> include::ingest/put_pipeline.asciidoc[] include::ingest/get_pipeline.asciidoc[] include::ingest/delete_pipeline.asciidoc[] +include::ingest/simulate_pipeline.asciidoc[] == Snapshot APIs diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java index c6252feea276c..f7f76a2bbca7d 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java @@ -19,13 +19,18 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + /** * Holds the end result of what a pipeline did to sample document provided via the simulate api. 
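+ * <p>
+ * With the parser added in this change the result can also be read back from a REST response body,
+ * e.g. (illustrative only) {@code SimulateDocumentBaseResult parsed = SimulateDocumentBaseResult.fromXContent(parser);}
+ * with {@code parser} positioned at the result object.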
*/ @@ -33,6 +38,33 @@ public final class SimulateDocumentBaseResult implements SimulateDocumentResult private final WriteableIngestDocument ingestDocument; private final Exception failure; + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_document_base_result", + true, + a -> { + if (a[1] == null) { + assert a[0] != null; + return new SimulateDocumentBaseResult(((WriteableIngestDocument)a[0]).getIngestDocument()); + } else { + assert a[0] == null; + return new SimulateDocumentBaseResult((ElasticsearchException)a[1]); + } + } + ); + static { + PARSER.declareObject( + optionalConstructorArg(), + WriteableIngestDocument.INGEST_DOC_PARSER, + new ParseField(WriteableIngestDocument.DOC_FIELD) + ); + PARSER.declareObject( + optionalConstructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField("error") + ); + } + public SimulateDocumentBaseResult(IngestDocument ingestDocument) { this.ingestDocument = new WriteableIngestDocument(ingestDocument); failure = null; @@ -89,4 +121,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static SimulateDocumentBaseResult fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java index 21e802981850c..099e238f2d25e 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java @@ -18,21 +18,38 @@ */ package org.elasticsearch.action.ingest; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + /** * Holds the result of what a pipeline did to a sample document via the simulate api, but instead of {@link SimulateDocumentBaseResult} * this result class holds the intermediate result each processor did to the sample document. 
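+ * <p>
+ * Each per-processor outcome is exposed via {@code getProcessorResults()} and is serialized under the
+ * {@code processor_results} array; the parser added in this change reads that array back through
+ * {@code SimulateProcessorResult.PARSER}.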
*/ public final class SimulateDocumentVerboseResult implements SimulateDocumentResult { + public static final String PROCESSOR_RESULT_FIELD = "processor_results"; private final List processorResults; + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_document_verbose_result", + true, + a -> new SimulateDocumentVerboseResult((List)a[0]) + ); + static { + PARSER.declareObjectArray(constructorArg(), SimulateProcessorResult.PARSER, new ParseField(PROCESSOR_RESULT_FIELD)); + } + public SimulateDocumentVerboseResult(List processorResults) { this.processorResults = processorResults; } @@ -63,7 +80,7 @@ public List getProcessorResults() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.startArray("processor_results"); + builder.startArray(PROCESSOR_RESULT_FIELD); for (SimulateProcessorResult processorResult : processorResults) { processorResult.toXContent(builder, params); } @@ -71,4 +88,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static SimulateDocumentVerboseResult fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index 3aa697b8e997c..9a7d6bb7feea9 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -25,6 +25,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -42,7 +44,7 @@ import static org.elasticsearch.ingest.IngestDocument.MetaData; -public class SimulatePipelineRequest extends ActionRequest { +public class SimulatePipelineRequest extends ActionRequest implements ToXContentObject { private String id; private boolean verbose; @@ -126,6 +128,12 @@ public void writeTo(StreamOutput out) throws IOException { } } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.rawValue(source.streamInput(), xContentType); + return builder; + } + public static final class Fields { static final String PIPELINE = "pipeline"; static final String DOCS = "docs"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java index e9ea1a7750738..991e81a14553b 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java @@ -19,22 +19,90 @@ package org.elasticsearch.action.ingest; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + public class SimulatePipelineResponse extends ActionResponse implements ToXContentObject { private String pipelineId; private boolean verbose; private List results; + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_pipeline_response", + true, + a -> { + List results = (List)a[0]; + boolean verbose = false; + if (results.size() > 0) { + if (results.get(0) instanceof SimulateDocumentVerboseResult) { + verbose = true; + } + } + return new SimulatePipelineResponse(null, verbose, results); + } + ); + static { + PARSER.declareObjectArray( + constructorArg(), + (parser, context) -> { + Token token = parser.currentToken(); + ensureExpectedToken(Token.START_OBJECT, token, parser::getTokenLocation); + SimulateDocumentResult result = null; + while ((token = parser.nextToken()) != Token.END_OBJECT) { + ensureExpectedToken(token, Token.FIELD_NAME, parser::getTokenLocation); + String fieldName = parser.currentName(); + token = parser.nextToken(); + if (token == Token.START_ARRAY) { + if (fieldName.equals(SimulateDocumentVerboseResult.PROCESSOR_RESULT_FIELD)) { + List results = new ArrayList<>(); + while ((token = parser.nextToken()) == Token.START_OBJECT) { + results.add(SimulateProcessorResult.fromXContent(parser)); + } + ensureExpectedToken(Token.END_ARRAY, token, parser::getTokenLocation); + result = new SimulateDocumentVerboseResult(results); + } else { + parser.skipChildren(); + } + } else if (token.equals(Token.START_OBJECT)) { + switch (fieldName) { + case WriteableIngestDocument.DOC_FIELD: + result = new SimulateDocumentBaseResult( + WriteableIngestDocument.INGEST_DOC_PARSER.apply(parser, null).getIngestDocument() + ); + break; + case "error": + result = new SimulateDocumentBaseResult(ElasticsearchException.fromXContent(parser)); + break; + default: + parser.skipChildren(); + break; + } + } // else it is a value skip it + } + assert result != null; + return result; + }, + new ParseField(Fields.DOCUMENTS)); + } + public SimulatePipelineResponse() { } @@ -98,6 +166,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public static SimulatePipelineResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + static final class Fields { static final String DOCUMENTS = "docs"; } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java index 386a00b391f3c..101ce7ec260e1 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java @@ -19,33 +19,91 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.ElasticsearchException; +import 
org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; -class SimulateProcessorResult implements Writeable, ToXContentObject { +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +public class SimulateProcessorResult implements Writeable, ToXContentObject { + + private static final String IGNORED_ERROR_FIELD = "ignored_error"; private final String processorTag; private final WriteableIngestDocument ingestDocument; private final Exception failure; - SimulateProcessorResult(String processorTag, IngestDocument ingestDocument, Exception failure) { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser IGNORED_ERROR_PARSER = + new ConstructingObjectParser<>( + "ignored_error_parser", + true, + a -> (ElasticsearchException)a[0] + ); + static { + IGNORED_ERROR_PARSER.declareObject( + constructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField("error") + ); + } + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_processor_result", + true, + a -> { + String processorTag = a[0] == null ? null : (String)a[0]; + IngestDocument document = a[1] == null ? null : ((WriteableIngestDocument)a[1]).getIngestDocument(); + Exception failure = null; + if (a[2] != null) { + failure = (ElasticsearchException)a[2]; + } else if (a[3] != null) { + failure = (ElasticsearchException)a[3]; + } + return new SimulateProcessorResult(processorTag, document, failure); + } + ); + static { + PARSER.declareString(optionalConstructorArg(), new ParseField(ConfigurationUtils.TAG_KEY)); + PARSER.declareObject( + optionalConstructorArg(), + WriteableIngestDocument.INGEST_DOC_PARSER, + new ParseField(WriteableIngestDocument.DOC_FIELD) + ); + PARSER.declareObject( + optionalConstructorArg(), + IGNORED_ERROR_PARSER, + new ParseField(IGNORED_ERROR_FIELD) + ); + PARSER.declareObject( + optionalConstructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField("error") + ); + } + + public SimulateProcessorResult(String processorTag, IngestDocument ingestDocument, Exception failure) { this.processorTag = processorTag; this.ingestDocument = (ingestDocument == null) ? 
null : new WriteableIngestDocument(ingestDocument); this.failure = failure; } - SimulateProcessorResult(String processorTag, IngestDocument ingestDocument) { + public SimulateProcessorResult(String processorTag, IngestDocument ingestDocument) { this(processorTag, ingestDocument, null); } - SimulateProcessorResult(String processorTag, Exception failure) { + public SimulateProcessorResult(String processorTag, Exception failure) { this(processorTag, null, failure); } @@ -98,7 +156,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } if (failure != null && ingestDocument != null) { - builder.startObject("ignored_error"); + builder.startObject(IGNORED_ERROR_FIELD); ElasticsearchException.generateFailureXContent(builder, params, failure, true); builder.endObject(); } else if (failure != null) { @@ -112,4 +170,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static SimulateProcessorResult fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 87168cb7a9bba..2430868bb5909 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -20,24 +20,91 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.IngestDocument.MetaData; import java.io.IOException; import java.time.ZoneId; +import java.time.ZonedDateTime; import java.util.Date; +import java.util.HashMap; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + final class WriteableIngestDocument implements Writeable, ToXContentFragment { + static final String SOURCE_FIELD = "_source"; + static final String INGEST_FIELD = "_ingest"; + static final String DOC_FIELD = "doc"; private final IngestDocument ingestDocument; + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser INGEST_DOC_PARSER = + new ConstructingObjectParser<>( + "ingest_document", + true, + a -> { + HashMap sourceAndMetadata = new HashMap<>(); + sourceAndMetadata.put(MetaData.INDEX.getFieldName(), a[0]); + sourceAndMetadata.put(MetaData.TYPE.getFieldName(), a[1]); + sourceAndMetadata.put(MetaData.ID.getFieldName(), a[2]); + if (a[3] != null) { + sourceAndMetadata.put(MetaData.ROUTING.getFieldName(), a[3]); + } + if (a[4] != null) { + sourceAndMetadata.put(MetaData.VERSION.getFieldName(), a[4]); + } + if (a[5] != null) { + sourceAndMetadata.put(MetaData.VERSION_TYPE.getFieldName(), a[5]); + } + 
sourceAndMetadata.putAll((Map)a[6]); + return new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, (Map)a[7])); + } + ); + static { + INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(MetaData.INDEX.getFieldName())); + INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(MetaData.TYPE.getFieldName())); + INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(MetaData.ID.getFieldName())); + INGEST_DOC_PARSER.declareString(optionalConstructorArg(), new ParseField(MetaData.ROUTING.getFieldName())); + INGEST_DOC_PARSER.declareLong(optionalConstructorArg(), new ParseField(MetaData.VERSION.getFieldName())); + INGEST_DOC_PARSER.declareString(optionalConstructorArg(), new ParseField(MetaData.VERSION_TYPE.getFieldName())); + INGEST_DOC_PARSER.declareObject(constructorArg(), (p, c) -> p.map(), new ParseField(SOURCE_FIELD)); + INGEST_DOC_PARSER.declareObject( + constructorArg(), + (p, c) -> { + Map ingestMap = p.map(); + ingestMap.computeIfPresent( + "timestamp", + (k, o) -> ZonedDateTime.parse((String)o) + ); + return ingestMap; + }, + new ParseField(INGEST_FIELD) + ); + } + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "writeable_ingest_document", + true, + a -> (WriteableIngestDocument)a[0] + ); + static { + PARSER.declareObject(constructorArg(), INGEST_DOC_PARSER, new ParseField(DOC_FIELD)); + } + WriteableIngestDocument(IngestDocument ingestDocument) { assert ingestDocument != null; this.ingestDocument = ingestDocument; @@ -67,19 +134,25 @@ IngestDocument getIngestDocument() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject("doc"); - Map metadataMap = ingestDocument.extractMetadata(); + builder.startObject(DOC_FIELD); + Map metadataMap = ingestDocument.getMetadata(); for (Map.Entry metadata : metadataMap.entrySet()) { if (metadata.getValue() != null) { builder.field(metadata.getKey().getFieldName(), metadata.getValue().toString()); } } - builder.field("_source", ingestDocument.getSourceAndMetadata()); - builder.field("_ingest", ingestDocument.getIngestMetadata()); + Map source = IngestDocument.deepCopyMap(ingestDocument.getSourceAndMetadata()); + metadataMap.keySet().forEach(mD -> source.remove(mD.getFieldName())); + builder.field(SOURCE_FIELD, source); + builder.field(INGEST_FIELD, ingestDocument.getIngestMetadata()); builder.endObject(); return builder; } + public static WriteableIngestDocument fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + @Override public boolean equals(Object o) { if (this == o) { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index e31a97dc2c6ce..2bd842e72b107 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -570,6 +570,17 @@ public Map extractMetadata() { return metadataMap; } + /** + * Does the same thing as {@link #extractMetadata} but does not mutate the map. + */ + public Map getMetadata() { + Map metadataMap = new EnumMap<>(MetaData.class); + for (MetaData metaData : MetaData.values()) { + metadataMap.put(metaData, sourceAndMetadata.get(metaData.getFieldName())); + } + return metadataMap; + } + /** * Returns the available ingest metadata fields, by default only timestamp, but it is possible to set additional ones. 
* Use only for reading values, modify them instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} @@ -588,7 +599,7 @@ public Map getSourceAndMetadata() { } @SuppressWarnings("unchecked") - private static Map deepCopyMap(Map source) { + public static Map deepCopyMap(Map source) { return (Map) deepCopy(source); } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java new file mode 100644 index 0000000000000..bfa6c1eb9b8c3 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.ingest; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; + +import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.elasticsearch.action.ingest.WriteableIngestDocumentTests.createRandomIngestDoc; + +public class SimulateDocumentBaseResultTests extends AbstractXContentTestCase { + + public void testSerialization() throws IOException { + boolean isFailure = randomBoolean(); + SimulateDocumentBaseResult simulateDocumentBaseResult = createTestInstance(isFailure); + + BytesStreamOutput out = new BytesStreamOutput(); + simulateDocumentBaseResult.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + SimulateDocumentBaseResult otherSimulateDocumentBaseResult = new SimulateDocumentBaseResult(streamInput); + + if (isFailure) { + assertThat(otherSimulateDocumentBaseResult.getIngestDocument(), equalTo(simulateDocumentBaseResult.getIngestDocument())); + assertThat(otherSimulateDocumentBaseResult.getFailure(), instanceOf(IllegalArgumentException.class)); + IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentBaseResult.getFailure(); + assertThat(e.getMessage(), equalTo("test")); + } else { + assertIngestDocument(otherSimulateDocumentBaseResult.getIngestDocument(), simulateDocumentBaseResult.getIngestDocument()); + } + } + + static SimulateDocumentBaseResult createTestInstance(boolean isFailure) { + SimulateDocumentBaseResult simulateDocumentBaseResult; + if 
(isFailure) { + simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); + } else { + IngestDocument ingestDocument = createRandomIngestDoc(); + simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); + } + return simulateDocumentBaseResult; + } + + private static SimulateDocumentBaseResult createTestInstanceWithFailures() { + return createTestInstance(randomBoolean()); + } + + @Override + protected SimulateDocumentBaseResult createTestInstance() { + return createTestInstance(false); + } + + @Override + protected SimulateDocumentBaseResult doParseInstance(XContentParser parser) { + return SimulateDocumentBaseResult.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + public static void assertEqualDocs(SimulateDocumentBaseResult response, SimulateDocumentBaseResult parsedResponse) { + assertEquals(response.getIngestDocument(), parsedResponse.getIngestDocument()); + if (response.getFailure() != null) { + assertNotNull(parsedResponse.getFailure()); + assertThat( + parsedResponse.getFailure().getMessage(), + containsString(response.getFailure().getMessage()) + ); + } else { + assertNull(parsedResponse.getFailure()); + } + } + + @Override + public void assertEqualInstances(SimulateDocumentBaseResult response, SimulateDocumentBaseResult parsedResponse) { + assertEqualDocs(response, parsedResponse); + } + + /** + * Test parsing {@link SimulateDocumentBaseResult} with inner failures as they don't support asserting on xcontent + * equivalence, given that exceptions are not parsed back as the same original class. We run the usual + * {@link AbstractXContentTestCase#testFromXContent()} without failures, and this other test with failures where + * we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulateDocumentBaseResultTests::createTestInstanceWithFailures; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields(), + getShuffleFieldsExceptions(), getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java deleted file mode 100644 index 83aad26f6a07b..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; - -import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.instanceOf; - -public class SimulateDocumentSimpleResultTests extends ESTestCase { - - public void testSerialization() throws IOException { - boolean isFailure = randomBoolean(); - SimulateDocumentBaseResult simulateDocumentBaseResult; - if (isFailure) { - simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); - } else { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); - } - - BytesStreamOutput out = new BytesStreamOutput(); - simulateDocumentBaseResult.writeTo(out); - StreamInput streamInput = out.bytes().streamInput(); - SimulateDocumentBaseResult otherSimulateDocumentBaseResult = new SimulateDocumentBaseResult(streamInput); - - if (isFailure) { - assertThat(otherSimulateDocumentBaseResult.getIngestDocument(), equalTo(simulateDocumentBaseResult.getIngestDocument())); - assertThat(otherSimulateDocumentBaseResult.getFailure(), instanceOf(IllegalArgumentException.class)); - IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentBaseResult.getFailure(); - assertThat(e.getMessage(), equalTo("test")); - } else { - assertIngestDocument(otherSimulateDocumentBaseResult.getIngestDocument(), simulateDocumentBaseResult.getIngestDocument()); - } - } -} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java new file mode 100644 index 0000000000000..5701bcc27800f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.ingest; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public class SimulateDocumentVerboseResultTests extends AbstractXContentTestCase { + + static SimulateDocumentVerboseResult createTestInstance(boolean withFailures) { + int numDocs = randomIntBetween(0, 10); + List results = new ArrayList<>(); + for (int i = 0; i getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + /** + * Test parsing {@link SimulateDocumentVerboseResult} with inner failures as they don't support asserting on xcontent + * equivalence, given that exceptions are not parsed back as the same original class. We run the usual + * {@link AbstractXContentTestCase#testFromXContent()} without failures, and this other test with failures where we + * disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulateDocumentVerboseResultTests::createTestInstanceWithFailures; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields(), + getShuffleFieldsExceptions(), getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java index be448a09db892..60bad4aad460f 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java @@ -21,57 +21,29 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.nullValue; -public class SimulatePipelineResponseTests extends ESTestCase { +public class SimulatePipelineResponseTests extends AbstractXContentTestCase { public 
void testSerialization() throws IOException { boolean isVerbose = randomBoolean(); String id = randomBoolean() ? randomAlphaOfLengthBetween(1, 10) : null; - int numResults = randomIntBetween(1, 10); - List results = new ArrayList<>(numResults); - for (int i = 0; i < numResults; i++) { - boolean isFailure = randomBoolean(); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - if (isVerbose) { - int numProcessors = randomIntBetween(1, 10); - List processorResults = new ArrayList<>(numProcessors); - for (int j = 0; j < numProcessors; j++) { - String processorTag = randomAlphaOfLengthBetween(1, 10); - SimulateProcessorResult processorResult; - if (isFailure) { - processorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); - } else { - processorResult = new SimulateProcessorResult(processorTag, ingestDocument); - } - processorResults.add(processorResult); - } - results.add(new SimulateDocumentVerboseResult(processorResults)); - } else { - results.add(new SimulateDocumentBaseResult(ingestDocument)); - SimulateDocumentBaseResult simulateDocumentBaseResult; - if (isFailure) { - simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); - } else { - simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); - } - results.add(simulateDocumentBaseResult); - } - } - SimulatePipelineResponse response = new SimulatePipelineResponse(id, isVerbose, results); + SimulatePipelineResponse response = createInstance(id, isVerbose, true); BytesStreamOutput out = new BytesStreamOutput(); response.writeTo(out); StreamInput streamInput = out.bytes().streamInput(); @@ -120,4 +92,97 @@ public void testSerialization() throws IOException { } } } + + static SimulatePipelineResponse createInstance(String pipelineId, boolean isVerbose, boolean withFailure) { + int numResults = randomIntBetween(1, 10); + List results = new ArrayList<>(numResults); + for (int i = 0; i < numResults; i++) { + if (isVerbose) { + results.add( + SimulateDocumentVerboseResultTests.createTestInstance(withFailure) + ); + } else { + results.add( + SimulateDocumentBaseResultTests.createTestInstance(withFailure && randomBoolean()) + ); + } + } + return new SimulatePipelineResponse(pipelineId, isVerbose, results); + } + + private static SimulatePipelineResponse createTestInstanceWithFailures() { + boolean isVerbose = randomBoolean(); + return createInstance(null, isVerbose, false); + } + + @Override + protected SimulatePipelineResponse createTestInstance() { + boolean isVerbose = randomBoolean(); + // since the pipeline id is not serialized with XContent we set it to null for equality tests. 
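+ // (the parser added to SimulatePipelineResponse always rebuilds the response with a null pipeline id)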
+ // we test failures separately since comparing XContent is not possible with failures + return createInstance(null, isVerbose, false); + } + + @Override + protected SimulatePipelineResponse doParseInstance(XContentParser parser) { + return SimulatePipelineResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected void assertEqualInstances(SimulatePipelineResponse response, + SimulatePipelineResponse parsedResponse) { + assertEquals(response.getPipelineId(), parsedResponse.getPipelineId()); + assertEquals(response.isVerbose(), parsedResponse.isVerbose()); + assertEquals(response.getResults().size(), parsedResponse.getResults().size()); + for (int i=0; i < response.getResults().size(); i++) { + if (response.isVerbose()) { + assertThat(response.getResults().get(i), instanceOf(SimulateDocumentVerboseResult.class)); + assertThat(parsedResponse.getResults().get(i), instanceOf(SimulateDocumentVerboseResult.class)); + SimulateDocumentVerboseResult responseResult = (SimulateDocumentVerboseResult)response.getResults().get(i); + SimulateDocumentVerboseResult parsedResult = (SimulateDocumentVerboseResult)parsedResponse.getResults().get(i); + SimulateDocumentVerboseResultTests.assertEqualDocs(responseResult, parsedResult); + } else { + assertThat(response.getResults().get(i), instanceOf(SimulateDocumentBaseResult.class)); + assertThat(parsedResponse.getResults().get(i), instanceOf(SimulateDocumentBaseResult.class)); + SimulateDocumentBaseResult responseResult = (SimulateDocumentBaseResult)response.getResults().get(i); + SimulateDocumentBaseResult parsedResult = (SimulateDocumentBaseResult)parsedResponse.getResults().get(i); + SimulateDocumentBaseResultTests.assertEqualDocs(responseResult, parsedResult); + } + } + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + /** + * Test parsing {@link SimulatePipelineResponse} with inner failures as they don't support asserting on xcontent equivalence, given that + * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. 
+ */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulatePipelineResponseTests::createTestInstanceWithFailures; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields(), getShuffleFieldsExceptions(), + getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java index 3014a1a4ae61d..2e0d6a75749bb 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java @@ -21,35 +21,29 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; +import static org.elasticsearch.action.ingest.WriteableIngestDocumentTests.createRandomIngestDoc; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -public class SimulateProcessorResultTests extends ESTestCase { +public class SimulateProcessorResultTests extends AbstractXContentTestCase { public void testSerialization() throws IOException { - String processorTag = randomAlphaOfLengthBetween(1, 10); boolean isSuccessful = randomBoolean(); boolean isIgnoredException = randomBoolean(); - SimulateProcessorResult simulateProcessorResult; - if (isSuccessful) { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - if (isIgnoredException) { - simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument, new IllegalArgumentException("test")); - } else { - simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument); - } - } else { - simulateProcessorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); - } + SimulateProcessorResult simulateProcessorResult = createTestInstance(isSuccessful, isIgnoredException); BytesStreamOutput out = new BytesStreamOutput(); simulateProcessorResult.writeTo(out); @@ -72,4 +66,96 @@ public void testSerialization() throws IOException { assertThat(e.getMessage(), equalTo("test")); } } + + static SimulateProcessorResult createTestInstance(boolean isSuccessful, + boolean isIgnoredException) { + String processorTag = randomAlphaOfLengthBetween(1, 10); + SimulateProcessorResult simulateProcessorResult; + if (isSuccessful) { + IngestDocument ingestDocument = createRandomIngestDoc(); + if (isIgnoredException) { + simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument, new 
IllegalArgumentException("test")); + } else { + simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument); + } + } else { + simulateProcessorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); + } + return simulateProcessorResult; + } + + private static SimulateProcessorResult createTestInstanceWithFailures() { + boolean isSuccessful = randomBoolean(); + boolean isIgnoredException = randomBoolean(); + return createTestInstance(isSuccessful, isIgnoredException); + } + + @Override + protected SimulateProcessorResult createTestInstance() { + // we test failures separately since comparing XContent is not possible with failures + return createTestInstance(true, false); + } + + @Override + protected SimulateProcessorResult doParseInstance(XContentParser parser) { + return SimulateProcessorResult.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + static void assertEqualProcessorResults(SimulateProcessorResult response, + SimulateProcessorResult parsedResponse) { + assertEquals(response.getProcessorTag(), parsedResponse.getProcessorTag()); + assertEquals(response.getIngestDocument(), parsedResponse.getIngestDocument()); + if (response.getFailure() != null ) { + assertNotNull(parsedResponse.getFailure()); + assertThat( + parsedResponse.getFailure().getMessage(), + containsString(response.getFailure().getMessage()) + ); + } else { + assertNull(parsedResponse.getFailure()); + } + } + + @Override + protected void assertEqualInstances(SimulateProcessorResult response, SimulateProcessorResult parsedResponse) { + assertEqualProcessorResults(response, parsedResponse); + } + + /** + * Test parsing {@link SimulateProcessorResult} with inner failures as they don't support asserting on xcontent equivalence, given that + * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulateProcessorResultTests::createTestInstanceWithFailures; + //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, + //but that does not bother our assertions, as we only want to test that we don't break. 
+ boolean supportsUnknownFields = true; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, + getShuffleFieldsExceptions(), getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java index 4d8e0f544c458..bc4589ff5d36c 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java @@ -25,14 +25,19 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.test.RandomObjects; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.StringJoiner; +import java.util.function.Predicate; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; @@ -40,7 +45,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -public class WriteableIngestDocumentTests extends ESTestCase { +public class WriteableIngestDocumentTests extends AbstractXContentTestCase { public void testEqualsAndHashcode() throws Exception { Map sourceAndMetadata = RandomDocumentPicks.randomSource(random()); @@ -147,4 +152,42 @@ public void testToXContent() throws IOException { IngestDocument serializedIngestDocument = new IngestDocument(toXContentSource, toXContentIngestMetadata); assertThat(serializedIngestDocument, equalTo(serializedIngestDocument)); } + + static IngestDocument createRandomIngestDoc() { + XContentType xContentType = randomFrom(XContentType.values()); + BytesReference sourceBytes = RandomObjects.randomSource(random(), xContentType); + Map randomSource = XContentHelper.convertToMap(sourceBytes, false, xContentType).v2(); + return RandomDocumentPicks.randomIngestDocument(random(), randomSource); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected WriteableIngestDocument createTestInstance() { + return new WriteableIngestDocument(createRandomIngestDoc()); + } + + @Override + protected WriteableIngestDocument doParseInstance(XContentParser parser) { + return WriteableIngestDocument.fromXContent(parser); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + 
.add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } } From 8ae2049889766e4c8cbe67bd3f0d1d9998c542a5 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 Jun 2018 13:46:48 +0200 Subject: [PATCH 20/34] Avoid deprecation warning when running the ML datafeed extractor. (#31463) In #29639 we added a `format` option to doc-value fields and deprecated usage of doc-value fields without a format so that we could migrate doc-value fields to use the format that comes with the mappings by default. However I missed to fix the machine-learning datafeed extractor. --- .../ml/datafeed/extractor/scroll/ExtractedField.java | 8 +++++++- .../ml/datafeed/extractor/scroll/ScrollDataExtractor.java | 8 +++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java index c2d866563d638..ef0dffa269114 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java @@ -103,7 +103,13 @@ public Object[] value(SearchHit hit) { if (value.length != 1) { return value; } - value[0] = ((BaseDateTime) value[0]).getMillis(); + if (value[0] instanceof String) { // doc_value field with the epoch_millis format + value[0] = Long.parseLong((String) value[0]); + } else if (value[0] instanceof BaseDateTime) { // script field + value[0] = ((BaseDateTime) value[0]).getMillis(); + } else { + throw new IllegalStateException("Unexpected value for a time field: " + value[0].getClass()); + } return value; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index bbd9f9ad533aa..57681a0aafbb2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -20,6 +20,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.fetch.StoredFieldsContext; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; @@ -47,6 +48,7 @@ class ScrollDataExtractor implements DataExtractor { private static final Logger LOGGER = Loggers.getLogger(ScrollDataExtractor.class); private static final TimeValue SCROLL_TIMEOUT = new TimeValue(30, TimeUnit.MINUTES); + private static final String EPOCH_MILLIS_FORMAT = "epoch_millis"; private final Client client; private final ScrollDataExtractorContext context; @@ -115,7 +117,11 @@ private SearchRequestBuilder buildSearchRequest(long start) { context.query, context.extractedFields.timeField(), start, context.end)); for (String docValueField : context.extractedFields.getDocValueFields()) { - searchRequestBuilder.addDocValueField(docValueField); + if (docValueField.equals(context.extractedFields.timeField())) { + searchRequestBuilder.addDocValueField(docValueField, EPOCH_MILLIS_FORMAT); + } else { + 
searchRequestBuilder.addDocValueField(docValueField, DocValueFieldsContext.USE_DEFAULT_FORMAT); + } } String[] sourceFields = context.extractedFields.getSourceFields(); if (sourceFields.length == 0) { From f22f91c57a6199ddadb19b2bf839d3ac7c3e2fbd Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Fri, 22 Jun 2018 15:31:23 +0200 Subject: [PATCH 21/34] Allow multiple unicast host providers (#31509) Introduces support for multiple host providers, which allows the settings-based hosts resolver to be treated just like any other UnicastHostsProvider. Also introduces the notion of a HostsResolver so that plugins such as FileBasedDiscovery do not need to create their own thread pool for resolving hosts, making it easier to add similar kinds of plugins. --- .../classic/AzureUnicastHostsProvider.java | 2 +- .../ec2/AwsEc2UnicastHostsProvider.java | 2 +- .../discovery/ec2/Ec2DiscoveryTests.java | 8 +- .../file/FileBasedDiscoveryPlugin.java | 48 +----------- .../file/FileBasedUnicastHostsProvider.java | 34 +-------- .../FileBasedUnicastHostsProviderTests.java | 12 ++- .../gce/GceUnicastHostsProvider.java | 2 +- .../discovery/gce/GceDiscoveryTests.java | 2 +- .../common/settings/ClusterSettings.java | 3 +- .../discovery/DiscoveryModule.java | 47 ++++++++---- .../zen/SettingsBasedHostsProvider.java | 75 +++++++++++++++++++ .../discovery/zen/UnicastHostsProvider.java | 12 ++- .../discovery/zen/UnicastZenPing.java | 64 ++++++---------- .../discovery/DiscoveryModuleTests.java | 36 ++++++++- .../single/SingleNodeDiscoveryIT.java | 2 +- .../discovery/zen/UnicastZenPingTests.java | 45 ++++++----- .../discovery/zen/ZenDiscoveryUnitTests.java | 2 +- .../discovery/MockUncasedHostProvider.java | 2 +- .../test/discovery/TestZenDiscovery.java | 4 +- 19 files changed, 224 insertions(+), 178 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java index 482dafb008fc5..1a9265de2a72f 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java @@ -132,7 +132,7 @@ public AzureUnicastHostsProvider(Settings settings, AzureComputeService azureCom * Setting `cloud.azure.refresh_interval` to `0` will disable caching (default). 
*/ @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { if (refreshInterval.millis() != 0) { if (dynamicHosts != null && (refreshInterval.millis() < 0 || (System.currentTimeMillis() - lastRefresh) < refreshInterval.millis())) { diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index 396e9f707d404..8f5037042986b 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -92,7 +92,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos } @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { return dynamicHosts.getOrRefresh(); } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index 9dc2e02edc1b5..295df0c818a91 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -93,7 +93,7 @@ protected List buildDynamicHosts(Settings nodeSettings, int no protected List buildDynamicHosts(Settings nodeSettings, int nodes, List> tagsList) { try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY, nodes, tagsList)) { AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, plugin.ec2Service); - List dynamicHosts = provider.buildDynamicHosts(); + List dynamicHosts = provider.buildDynamicHosts(null); logger.debug("--> addresses found: {}", dynamicHosts); return dynamicHosts; } catch (IOException e) { @@ -307,7 +307,7 @@ protected List fetchDynamicNodes() { } }; for (int i=0; i<3; i++) { - provider.buildDynamicHosts(); + provider.buildDynamicHosts(null); } assertThat(provider.fetchCount, is(3)); } @@ -324,12 +324,12 @@ protected List fetchDynamicNodes() { } }; for (int i=0; i<3; i++) { - provider.buildDynamicHosts(); + provider.buildDynamicHosts(null); } assertThat(provider.fetchCount, is(1)); Thread.sleep(1_000L); // wait for cache to expire for (int i=0; i<3; i++) { - provider.buildDynamicHosts(); + provider.buildDynamicHosts(null); } assertThat(provider.fetchCount, is(2)); } diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java index fb37b3bc01104..4d26447078597 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java @@ -19,35 +19,17 @@ package org.elasticsearch.discovery.file; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; -import 
org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.discovery.zen.UnicastHostsProvider; -import org.elasticsearch.discovery.zen.UnicastZenPing; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.node.Node; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.watcher.ResourceWatcherService; -import java.io.IOException; import java.nio.file.Path; -import java.util.Collection; import java.util.Collections; import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; import java.util.function.Supplier; /** @@ -57,47 +39,19 @@ */ public class FileBasedDiscoveryPlugin extends Plugin implements DiscoveryPlugin { - private static final Logger logger = Loggers.getLogger(FileBasedDiscoveryPlugin.class); - private final Settings settings; private final Path configPath; - private ExecutorService fileBasedDiscoveryExecutorService; public FileBasedDiscoveryPlugin(Settings settings, Path configPath) { this.settings = settings; this.configPath = configPath; } - @Override - public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, ScriptService scriptService, - NamedXContentRegistry xContentRegistry, Environment environment, - NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { - final int concurrentConnects = UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings); - final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings, "[file_based_discovery_resolve]"); - fileBasedDiscoveryExecutorService = EsExecutors.newScaling( - Node.NODE_NAME_SETTING.get(settings) + "/" + "file_based_discovery_resolve", - 0, - concurrentConnects, - 60, - TimeUnit.SECONDS, - threadFactory, - threadPool.getThreadContext()); - - return Collections.emptyList(); - } - - @Override - public void close() throws IOException { - ThreadPool.terminate(fileBasedDiscoveryExecutorService, 0, TimeUnit.SECONDS); - } - @Override public Map> getZenHostsProviders(TransportService transportService, NetworkService networkService) { return Collections.singletonMap( "file", - () -> new FileBasedUnicastHostsProvider( - new Environment(settings, configPath), transportService, fileBasedDiscoveryExecutorService)); + () -> new FileBasedUnicastHostsProvider(new Environment(settings, configPath))); } } diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java index 7abcb4454720c..584ae4de5a2b5 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java @@ -23,26 +23,19 @@ import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.unit.TimeValue; import 
org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.env.Environment; -import org.elasticsearch.transport.TransportService; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; -import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.concurrent.ExecutorService; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.discovery.zen.UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT; -import static org.elasticsearch.discovery.zen.UnicastZenPing.resolveHostsLists; - /** * An implementation of {@link UnicastHostsProvider} that reads hosts/ports * from {@link #UNICAST_HOSTS_FILE}. @@ -59,23 +52,15 @@ class FileBasedUnicastHostsProvider extends AbstractComponent implements Unicast static final String UNICAST_HOSTS_FILE = "unicast_hosts.txt"; - private final TransportService transportService; - private final ExecutorService executorService; - private final Path unicastHostsFilePath; - private final TimeValue resolveTimeout; - - FileBasedUnicastHostsProvider(Environment environment, TransportService transportService, ExecutorService executorService) { + FileBasedUnicastHostsProvider(Environment environment) { super(environment.settings()); - this.transportService = transportService; - this.executorService = executorService; this.unicastHostsFilePath = environment.configFile().resolve("discovery-file").resolve(UNICAST_HOSTS_FILE); - this.resolveTimeout = DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT.get(settings); } @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { List hostsList; try (Stream lines = Files.lines(unicastHostsFilePath)) { hostsList = lines.filter(line -> line.startsWith("#") == false) // lines starting with `#` are comments @@ -90,21 +75,8 @@ public List buildDynamicHosts() { hostsList = Collections.emptyList(); } - final List dynamicHosts = new ArrayList<>(); - try { - dynamicHosts.addAll(resolveHostsLists( - executorService, - logger, - hostsList, - 1, - transportService, - resolveTimeout)); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - + final List dynamicHosts = hostsResolver.resolveHosts(hostsList, 1); logger.debug("[discovery-file] Using dynamic discovery nodes {}", dynamicHosts); - return dynamicHosts; } diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java index 860d3537635d5..5837d3bcdfe3f 100644 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java +++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java @@ -24,7 +24,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.discovery.zen.UnicastZenPing; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -123,8 +125,10 @@ public void 
testUnicastHostsDoesNotExist() throws Exception { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); final Environment environment = TestEnvironment.newEnvironment(settings); - final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(environment, transportService, executorService); - final List addresses = provider.buildDynamicHosts(); + final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(environment); + final List addresses = provider.buildDynamicHosts((hosts, limitPortCounts) -> + UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService, + TimeValue.timeValueSeconds(10))); assertEquals(0, addresses.size()); } @@ -163,6 +167,8 @@ private List setupAndRunHostProvider(final List hostEn } return new FileBasedUnicastHostsProvider( - new Environment(settings, configPath), transportService, executorService).buildDynamicHosts(); + new Environment(settings, configPath)).buildDynamicHosts((hosts, limitPortCounts) -> + UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService, + TimeValue.timeValueSeconds(10))); } } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index 790d70a8b99b0..778c38697c5ec 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -93,7 +93,7 @@ public GceUnicastHostsProvider(Settings settings, GceInstancesService gceInstanc * Information can be cached using `cloud.gce.refresh_interval` property if needed. */ @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { // We check that needed properties have been set if (this.project == null || this.project.isEmpty() || this.zones == null || this.zones.isEmpty()) { throw new IllegalArgumentException("one or more gce discovery settings are missing. 
" + diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index a1944a15d8036..816152186e761 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -108,7 +108,7 @@ protected List buildDynamicNodes(GceInstancesServiceImpl gceIn GceUnicastHostsProvider provider = new GceUnicastHostsProvider(nodeSettings, gceInstancesService, transportService, new NetworkService(Collections.emptyList())); - List dynamicHosts = provider.buildDynamicHosts(); + List dynamicHosts = provider.buildDynamicHosts(null); logger.info("--> addresses found: {}", dynamicHosts); return dynamicHosts; } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index e616613a425a9..478325c66f983 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -56,6 +56,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ElectMasterService; import org.elasticsearch.discovery.zen.FaultDetection; +import org.elasticsearch.discovery.zen.SettingsBasedHostsProvider; import org.elasticsearch.discovery.zen.UnicastZenPing; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.env.Environment; @@ -357,7 +358,7 @@ public void apply(Settings value, Settings current, Settings previous) { ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING, ZenDiscovery.MASTER_ELECTION_IGNORE_NON_MASTER_PINGS_SETTING, ZenDiscovery.MAX_PENDING_CLUSTER_STATES_SETTING, - UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, + SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING, UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT, SearchService.DEFAULT_KEEPALIVE_SETTING, diff --git a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index 179692cd516c8..e47fe7a7a70ed 100644 --- a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -31,7 +31,9 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.single.SingleNodeDiscovery; +import org.elasticsearch.discovery.zen.SettingsBasedHostsProvider; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.plugins.DiscoveryPlugin; @@ -42,13 +44,15 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; +import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Supplier; +import java.util.stream.Collectors; /** * A module for loading classes for node discovery. 
@@ -57,8 +61,8 @@ public class DiscoveryModule { public static final Setting DISCOVERY_TYPE_SETTING = new Setting<>("discovery.type", "zen", Function.identity(), Property.NodeScope); - public static final Setting> DISCOVERY_HOSTS_PROVIDER_SETTING = - new Setting<>("discovery.zen.hosts_provider", (String)null, Optional::ofNullable, Property.NodeScope); + public static final Setting> DISCOVERY_HOSTS_PROVIDER_SETTING = + Setting.listSetting("discovery.zen.hosts_provider", Collections.emptyList(), Function.identity(), Property.NodeScope); private final Discovery discovery; @@ -66,9 +70,9 @@ public DiscoveryModule(Settings settings, ThreadPool threadPool, TransportServic NamedWriteableRegistry namedWriteableRegistry, NetworkService networkService, MasterService masterService, ClusterApplier clusterApplier, ClusterSettings clusterSettings, List plugins, AllocationService allocationService) { - final UnicastHostsProvider hostsProvider; final Collection> joinValidators = new ArrayList<>(); - Map> hostProviders = new HashMap<>(); + final Map> hostProviders = new HashMap<>(); + hostProviders.put("settings", () -> new SettingsBasedHostsProvider(settings, transportService)); for (DiscoveryPlugin plugin : plugins) { plugin.getZenHostsProviders(transportService, networkService).entrySet().forEach(entry -> { if (hostProviders.put(entry.getKey(), entry.getValue()) != null) { @@ -80,17 +84,32 @@ public DiscoveryModule(Settings settings, ThreadPool threadPool, TransportServic joinValidators.add(joinValidator); } } - Optional hostsProviderName = DISCOVERY_HOSTS_PROVIDER_SETTING.get(settings); - if (hostsProviderName.isPresent()) { - Supplier hostsProviderSupplier = hostProviders.get(hostsProviderName.get()); - if (hostsProviderSupplier == null) { - throw new IllegalArgumentException("Unknown zen hosts provider [" + hostsProviderName.get() + "]"); - } - hostsProvider = Objects.requireNonNull(hostsProviderSupplier.get()); - } else { - hostsProvider = Collections::emptyList; + List hostsProviderNames = DISCOVERY_HOSTS_PROVIDER_SETTING.get(settings); + // for bwc purposes, add settings provider even if not explicitly specified + if (hostsProviderNames.contains("settings") == false) { + List extendedHostsProviderNames = new ArrayList<>(); + extendedHostsProviderNames.add("settings"); + extendedHostsProviderNames.addAll(hostsProviderNames); + hostsProviderNames = extendedHostsProviderNames; + } + + final Set missingProviderNames = new HashSet<>(hostsProviderNames); + missingProviderNames.removeAll(hostProviders.keySet()); + if (missingProviderNames.isEmpty() == false) { + throw new IllegalArgumentException("Unknown zen hosts providers " + missingProviderNames); } + List filteredHostsProviders = hostsProviderNames.stream() + .map(hostProviders::get).map(Supplier::get).collect(Collectors.toList()); + + final UnicastHostsProvider hostsProvider = hostsResolver -> { + final List addresses = new ArrayList<>(); + for (UnicastHostsProvider provider : filteredHostsProviders) { + addresses.addAll(provider.buildDynamicHosts(hostsResolver)); + } + return Collections.unmodifiableList(addresses); + }; + Map> discoveryTypes = new HashMap<>(); discoveryTypes.put("zen", () -> new ZenDiscovery(settings, threadPool, transportService, namedWriteableRegistry, masterService, clusterApplier, diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java new file mode 100644 index 0000000000000..6d6453c776e68 --- 
/dev/null +++ b/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.zen; + +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.transport.TransportService; + +import java.util.List; +import java.util.function.Function; + +import static java.util.Collections.emptyList; + +/** + * An implementation of {@link UnicastHostsProvider} that reads hosts/ports + * from the "discovery.zen.ping.unicast.hosts" node setting. If the port is + * left off an entry, a default port of 9300 is assumed. + * + * An example unicast hosts setting might look as follows: + * [67.81.244.10, 67.81.244.11:9305, 67.81.244.15:9400] + */ +public class SettingsBasedHostsProvider extends AbstractComponent implements UnicastHostsProvider { + + public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = + Setting.listSetting("discovery.zen.ping.unicast.hosts", emptyList(), Function.identity(), Setting.Property.NodeScope); + + // these limits are per-address + public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; + public static final int LIMIT_LOCAL_PORTS_COUNT = 5; + + private final List configuredHosts; + + private final int limitPortCounts; + + public SettingsBasedHostsProvider(Settings settings, TransportService transportService) { + super(settings); + + if (DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.exists(settings)) { + configuredHosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); + // we only limit to 1 address, makes no sense to ping 100 ports + limitPortCounts = LIMIT_FOREIGN_PORTS_COUNT; + } else { + // if unicast hosts are not specified, fill with simple defaults on the local machine + configuredHosts = transportService.getLocalAddresses(); + limitPortCounts = LIMIT_LOCAL_PORTS_COUNT; + } + + logger.debug("using initial hosts {}", configuredHosts); + } + + @Override + public List buildDynamicHosts(HostsResolver hostsResolver) { + return hostsResolver.resolveHosts(configuredHosts, limitPortCounts); + } + +} diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java index d719f9d123b8c..86410005c92bf 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java @@ -31,5 +31,15 @@ public interface UnicastHostsProvider { /** * Builds the dynamic list of unicast hosts to be used for unicast discovery. 
*/ - List buildDynamicHosts(); + List buildDynamicHosts(HostsResolver hostsResolver); + + /** + * Helper object that allows to resolve a list of hosts to a list of transport addresses. + * Each host is resolved into a transport address (or a collection of addresses if the + * number of ports is greater than one) + */ + interface HostsResolver { + List resolveHosts(List hosts, int limitPortCounts); + } + } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index cbadbb4a1e09b..9c86fa17e9b06 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -82,11 +82,9 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; -import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; -import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; @@ -94,26 +92,15 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { public static final String ACTION_NAME = "internal:discovery/zen/unicast"; - public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = - Setting.listSetting("discovery.zen.ping.unicast.hosts", emptyList(), Function.identity(), - Property.NodeScope); public static final Setting DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING = Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, Property.NodeScope); public static final Setting DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT = Setting.positiveTimeSetting("discovery.zen.ping.unicast.hosts.resolve_timeout", TimeValue.timeValueSeconds(5), Property.NodeScope); - // these limits are per-address - public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; - public static final int LIMIT_LOCAL_PORTS_COUNT = 5; - private final ThreadPool threadPool; private final TransportService transportService; private final ClusterName clusterName; - private final List configuredHosts; - - private final int limitPortCounts; - private final PingContextProvider contextProvider; private final AtomicInteger pingingRoundIdGenerator = new AtomicInteger(); @@ -141,19 +128,10 @@ public UnicastZenPing(Settings settings, ThreadPool threadPool, TransportService this.contextProvider = contextProvider; final int concurrentConnects = DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings); - if (DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.exists(settings)) { - configuredHosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); - // we only limit to 1 addresses, makes no sense to ping 100 ports - limitPortCounts = LIMIT_FOREIGN_PORTS_COUNT; - } else { - // if unicast hosts are not specified, fill with simple defaults on the local machine - configuredHosts = transportService.getLocalAddresses(); - limitPortCounts = LIMIT_LOCAL_PORTS_COUNT; - } + resolveTimeout = DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT.get(settings); logger.debug( - "using initial hosts {}, with concurrent_connects [{}], resolve_timeout [{}]", - configuredHosts, + "using concurrent_connects [{}], resolve_timeout [{}]", concurrentConnects, resolveTimeout); @@ -172,9 +150,9 @@ public UnicastZenPing(Settings 
settings, ThreadPool threadPool, TransportService } /** - * Resolves a list of hosts to a list of discovery nodes. Each host is resolved into a transport address (or a collection of addresses - * if the number of ports is greater than one) and the transport addresses are used to created discovery nodes. Host lookups are done - * in parallel using specified executor service up to the specified resolve timeout. + * Resolves a list of hosts to a list of transport addresses. Each host is resolved into a transport address (or a collection of + * addresses if the number of ports is greater than one). Host lookups are done in parallel using specified executor service up + * to the specified resolve timeout. * * @param executorService the executor service used to parallelize hostname lookups * @param logger logger used for logging messages regarding hostname lookups @@ -190,7 +168,7 @@ public static List resolveHostsLists( final List hosts, final int limitPortCounts, final TransportService transportService, - final TimeValue resolveTimeout) throws InterruptedException { + final TimeValue resolveTimeout) { Objects.requireNonNull(executorService); Objects.requireNonNull(logger); Objects.requireNonNull(hosts); @@ -205,8 +183,13 @@ public static List resolveHostsLists( .stream() .map(hn -> (Callable) () -> transportService.addressesFromString(hn, limitPortCounts)) .collect(Collectors.toList()); - final List> futures = - executorService.invokeAll(callables, resolveTimeout.nanos(), TimeUnit.NANOSECONDS); + final List> futures; + try { + futures = executorService.invokeAll(callables, resolveTimeout.nanos(), TimeUnit.NANOSECONDS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return Collections.emptyList(); + } final List transportAddresses = new ArrayList<>(); final Set localAddresses = new HashSet<>(); localAddresses.add(transportService.boundAddress().publishAddress()); @@ -232,6 +215,9 @@ public static List resolveHostsLists( assert e.getCause() != null; final String message = "failed to resolve host [" + hostname + "]"; logger.warn(message, e.getCause()); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + // ignore } } else { logger.warn("timed out after [{}] resolving host [{}]", resolveTimeout, hostname); @@ -240,6 +226,11 @@ public static List resolveHostsLists( return Collections.unmodifiableList(transportAddresses); } + private UnicastHostsProvider.HostsResolver createHostsResolver() { + return (hosts, limitPortCounts) -> resolveHostsLists(unicastZenPingExecutorService, logger, hosts, + limitPortCounts, transportService, resolveTimeout); + } + @Override public void close() { ThreadPool.terminate(unicastZenPingExecutorService, 10, TimeUnit.SECONDS); @@ -281,18 +272,7 @@ protected void ping(final Consumer resultsConsumer, final TimeValue scheduleDuration, final TimeValue requestDuration) { final List seedAddresses = new ArrayList<>(); - try { - seedAddresses.addAll(resolveHostsLists( - unicastZenPingExecutorService, - logger, - configuredHosts, - limitPortCounts, - transportService, - resolveTimeout)); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - seedAddresses.addAll(hostsProvider.buildDynamicHosts()); + seedAddresses.addAll(hostsProvider.buildDynamicHosts(createHostsResolver())); final DiscoveryNodes nodes = contextProvider.clusterState().nodes(); // add all possible master nodes that were active in the last known cluster configuration for (ObjectCursor masterNode : nodes.getMasterNodes().values()) { diff 
--git a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java index 18829d515973d..f2491b2db1f9a 100644 --- a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java @@ -137,11 +137,10 @@ public void testDuplicateDiscovery() { public void testHostsProvider() { Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "custom").build(); - final UnicastHostsProvider provider = Collections::emptyList; AtomicBoolean created = new AtomicBoolean(false); DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("custom", () -> { created.set(true); - return Collections::emptyList; + return hostsResolver -> Collections.emptyList(); }); newModule(settings, Collections.singletonList(plugin)); assertTrue(created.get()); @@ -151,7 +150,7 @@ public void testUnknownHostsProvider() { Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "dne").build(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> newModule(settings, Collections.emptyList())); - assertEquals("Unknown zen hosts provider [dne]", e.getMessage()); + assertEquals("Unknown zen hosts providers [dne]", e.getMessage()); } public void testDuplicateHostsProvider() { @@ -162,6 +161,37 @@ public void testDuplicateHostsProvider() { assertEquals("Cannot register zen hosts provider [dup] twice", e.getMessage()); } + public void testSettingsHostsProvider() { + DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("settings", () -> null); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + newModule(Settings.EMPTY, Arrays.asList(plugin))); + assertEquals("Cannot register zen hosts provider [settings] twice", e.getMessage()); + } + + public void testMultiHostsProvider() { + AtomicBoolean created1 = new AtomicBoolean(false); + DummyHostsProviderPlugin plugin1 = () -> Collections.singletonMap("provider1", () -> { + created1.set(true); + return hostsResolver -> Collections.emptyList(); + }); + AtomicBoolean created2 = new AtomicBoolean(false); + DummyHostsProviderPlugin plugin2 = () -> Collections.singletonMap("provider2", () -> { + created2.set(true); + return hostsResolver -> Collections.emptyList(); + }); + AtomicBoolean created3 = new AtomicBoolean(false); + DummyHostsProviderPlugin plugin3 = () -> Collections.singletonMap("provider3", () -> { + created3.set(true); + return hostsResolver -> Collections.emptyList(); + }); + Settings settings = Settings.builder().putList(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), + "provider1", "provider3").build(); + newModule(settings, Arrays.asList(plugin1, plugin2, plugin3)); + assertTrue(created1.get()); + assertFalse(created2.get()); + assertTrue(created3.get()); + } + public void testLazyConstructionHostsProvider() { DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("custom", () -> { diff --git a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java index 33c87ea7f383e..c3ffbb82081b7 100644 --- a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java @@ -84,7 +84,7 @@ public void 
testDoesNotRespondToZenPings() throws Exception { internalCluster().getInstance(TransportService.class); // try to ping the single node directly final UnicastHostsProvider provider = - () -> Collections.singletonList(nodeTransport.getLocalNode().getAddress()); + hostsResolver -> Collections.singletonList(nodeTransport.getLocalNode().getAddress()); final CountDownLatch latch = new CountDownLatch(1); final DiscoveryNodes nodes = DiscoveryNodes.builder() .add(nodeTransport.getLocalNode()) diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 4aa75077431e7..eef926a1e1238 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -137,8 +137,6 @@ public void tearDown() throws Exception { } } - private static final UnicastHostsProvider EMPTY_HOSTS_PROVIDER = Collections::emptyList; - public void testSimplePings() throws IOException, InterruptedException, ExecutionException { // use ephemeral ports final Settings settings = Settings.builder().put("cluster.name", "test").put(TcpTransport.PORT.getKey(), 0).build(); @@ -182,7 +180,7 @@ public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfil final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build(); final ClusterState stateMismatch = ClusterState.builder(new ClusterName("mismatch")).version(randomNonNegativeLong()).build(); - Settings hostsSettings = Settings.builder() + final Settings hostsSettings = Settings.builder() .putList("discovery.zen.ping.unicast.hosts", NetworkAddress.format(new InetSocketAddress(handleA.address.address().getAddress(), handleA.address.address().getPort())), NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())), @@ -196,22 +194,21 @@ public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfil .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).localNodeId("UZP_A")) .build(); - TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); ClusterState stateB = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")) .build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); ClusterState stateC = ClusterState.builder(stateMismatch) .nodes(DiscoveryNodes.builder().add(handleC.node).localNodeId("UZP_C")) .build(); - TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleC, - EMPTY_HOSTS_PROVIDER, () -> stateC) { + TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleC, () -> stateC) { @Override protected Version getVersion() { return versionD; @@ -223,8 +220,7 @@ protected Version getVersion() { ClusterState stateD = ClusterState.builder(stateMismatch) 
.nodes(DiscoveryNodes.builder().add(handleD.node).localNodeId("UZP_D")) .build(); - TestUnicastZenPing zenPingD = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleD, - EMPTY_HOSTS_PROVIDER, () -> stateD); + TestUnicastZenPing zenPingD = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleD, () -> stateD); zenPingD.start(); closeables.push(zenPingD); @@ -329,21 +325,21 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).localNodeId("UZP_A")) .build(); - final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); ClusterState stateB = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")) .build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); ClusterState stateC = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleC.node).localNodeId("UZP_C")) .build(); - TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettings, threadPool, handleC, EMPTY_HOSTS_PROVIDER, () -> stateC); + TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettings, threadPool, handleC, () -> stateC); zenPingC.start(); closeables.push(zenPingC); @@ -408,7 +404,7 @@ public BoundTransportAddress boundAddress() { Collections.emptySet()); closeables.push(transportService); final int limitPortCounts = randomIntBetween(1, 10); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList("127.0.0.1"), @@ -452,7 +448,7 @@ public BoundTransportAddress boundAddress() { new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList(NetworkAddress.format(loopbackAddress)), @@ -503,7 +499,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi Collections.emptySet()); closeables.push(transportService); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList(hostname), @@ -562,7 +558,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi closeables.push(transportService); final TimeValue resolveTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 3)); try { - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("hostname1", "hostname2"), @@ -610,6 +606,7 @@ public void testResolveReuseExistingNodeConnections() throws ExecutionException, 
hostsSettingsBuilder.put("discovery.zen.ping.unicast.hosts", (String) null); } final Settings hostsSettings = hostsSettingsBuilder.build(); + final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build(); // connection to reuse @@ -627,14 +624,14 @@ public void onConnectionOpened(Transport.Connection connection) { .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).add(handleB.node).localNodeId("UZP_A")) .build(); - final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); final ClusterState stateB = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")) .build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); @@ -669,19 +666,20 @@ public void testPingingTemporalPings() throws ExecutionException, InterruptedExc .put("cluster.name", "test") .put("discovery.zen.ping.unicast.hosts", (String) null) // use nodes for simplicity .build(); + final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build(); final ClusterState stateA = ClusterState.builder(state) .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).add(handleB.node).localNodeId("UZP_A")).build(); - final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); // Node B doesn't know about A! 
final ClusterState stateB = ClusterState.builder(state).nodes( DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")).build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); @@ -728,7 +726,7 @@ public BoundTransportAddress boundAddress() { new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("127.0.0.1:9300:9300", "127.0.0.1:9301"), @@ -828,9 +826,10 @@ private static class NetworkHandle { private static class TestUnicastZenPing extends UnicastZenPing { TestUnicastZenPing(Settings settings, ThreadPool threadPool, NetworkHandle networkHandle, - UnicastHostsProvider unicastHostsProvider, PingContextProvider contextProvider) { + PingContextProvider contextProvider) { super(Settings.builder().put("node.name", networkHandle.node.getName()).put(settings).build(), - threadPool, networkHandle.transportService, unicastHostsProvider, contextProvider); + threadPool, networkHandle.transportService, + new SettingsBasedHostsProvider(settings, networkHandle.transportService), contextProvider); } volatile CountDownLatch allTasksCompleted; diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index 9273ab1514372..a60a23bcd6d5c 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -317,7 +317,7 @@ public void onNewClusterState(String source, Supplier clusterState } }; ZenDiscovery zenDiscovery = new ZenDiscovery(settings, threadPool, service, new NamedWriteableRegistry(ClusterModule.getNamedWriteables()), - masterService, clusterApplier, clusterSettings, Collections::emptyList, ESAllocationTestCase.createAllocationService(), + masterService, clusterApplier, clusterSettings, hostsResolver -> Collections.emptyList(), ESAllocationTestCase.createAllocationService(), Collections.emptyList()); zenDiscovery.start(); return zenDiscovery; diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java index 2e60a3c518dd3..dc9304637cdca 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java @@ -56,7 +56,7 @@ public MockUncasedHostProvider(Supplier localNodeSupplier, Cluste } @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { final DiscoveryNode localNode = getNode(); assert localNode != null; synchronized (activeNodesPerCluster) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java index 11f9e38e665ff..5387a659aa274 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java @@ -45,7 +45,7 @@ import java.util.Map; import java.util.function.Supplier; -import static org.elasticsearch.discovery.zen.UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING; +import static org.elasticsearch.discovery.zen.SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING; /** * A alternative zen discovery which allows using mocks for things like pings, as well as @@ -84,7 +84,7 @@ public Map> getZenHostsProviders(Transpor final Supplier supplier; if (USE_MOCK_PINGS.get(settings)) { // we have to return something in order for the unicast host provider setting to resolve to something. It will never be used - supplier = () -> () -> { + supplier = () -> hostsResolver -> { throw new UnsupportedOperationException(); }; } else { From c6cbc99f9c90c75e883393dc6b4691033ab73d72 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Fri, 22 Jun 2018 15:13:31 +0100 Subject: [PATCH 22/34] [ML] Add ML filter update API (#31437) This adds an API to allow updating a filter: POST _xpack/ml/filters/{filter_id}/_update The request body may have: - description: setting a new description - add_items: a list of the items to add - remove_items: a list of the items to remove This commit also changes the PUT filter API to return an error when the filter_id is already in use. Now that there is an API for updating filters, the PUT API should only be used to create new ones. Also, updating a filter results in a notification message auditing the change for every job that is using that filter. --- .../xpack/core/XPackClientPlugin.java | 2 + .../core/ml/action/UpdateFilterAction.java | 187 ++++++++++++++++++ .../xpack/core/ml/job/config/MlFilter.java | 13 +- .../xpack/core/ml/job/messages/Messages.java | 2 + .../autodetect/state/ModelSnapshot.java | 4 +- .../xpack/core/ml/utils/ExceptionsHelper.java | 4 + .../UpdateFilterActionRequestTests.java | 58 ++++++ .../core/ml/job/config/MlFilterTests.java | 9 +- .../xpack/ml/MachineLearning.java | 5 + .../ml/action/TransportGetFiltersAction.java | 9 +- .../ml/action/TransportPutFilterAction.java | 37 ++-- .../action/TransportUpdateFilterAction.java | 173 ++++++++++++++++ .../xpack/ml/job/JobManager.java | 53 +++-- .../persistence/BatchedBucketsIterator.java | 4 +- .../BatchedInfluencersIterator.java | 4 +- .../persistence/BatchedRecordsIterator.java | 4 +- .../xpack/ml/job/persistence/JobProvider.java | 19 +- .../rest/filter/RestUpdateFilterAction.java | 41 ++++ .../xpack/ml/job/JobManagerTests.java | 90 ++++++++- .../api/xpack.ml.update_filter.json | 20 ++ .../test/ml/custom_all_field.yml | 2 + .../test/ml/delete_model_snapshot.yml | 2 + .../rest-api-spec/test/ml/filter_crud.yml | 68 ++++++- .../test/ml/get_model_snapshots.yml | 3 + .../rest-api-spec/test/ml/index_layout.yml | 2 + .../rest-api-spec/test/ml/jobs_crud.yml | 4 + .../test/ml/jobs_get_result_buckets.yml | 3 + .../test/ml/jobs_get_result_categories.yml | 3 + .../test/ml/jobs_get_result_influencers.yml | 3 + .../ml/jobs_get_result_overall_buckets.yml | 9 + .../test/ml/jobs_get_result_records.yml | 2 + .../rest-api-spec/test/ml/jobs_get_stats.yml | 2 + .../test/ml/ml_anomalies_default_mappings.yml | 1 + .../test/ml/revert_model_snapshot.yml | 9 + .../test/ml/update_model_snapshot.yml | 2 + .../ml/integration/DetectionRulesIT.java | 12 +- .../smoke-test-ml-with-security/build.gradle | 1 + 37 files changed, 794 insertions(+), 72 
deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 2894138248b8c..0bf6601593dee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -84,6 +84,7 @@ import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateDatafeedAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.UpdateProcessAction; @@ -220,6 +221,7 @@ public List getClientActions() { OpenJobAction.INSTANCE, GetFiltersAction.INSTANCE, PutFilterAction.INSTANCE, + UpdateFilterAction.INSTANCE, DeleteFilterAction.INSTANCE, KillProcessAction.INSTANCE, GetBucketsAction.INSTANCE, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java new file mode 100644 index 0000000000000..57b3d3457d736 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java @@ -0,0 +1,187 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Objects; +import java.util.SortedSet; +import java.util.TreeSet; + + +public class UpdateFilterAction extends Action { + + public static final UpdateFilterAction INSTANCE = new UpdateFilterAction(); + public static final String NAME = "cluster:admin/xpack/ml/filters/update"; + + private UpdateFilterAction() { + super(NAME); + } + + @Override + public PutFilterAction.Response newResponse() { + return new PutFilterAction.Response(); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + public static final ParseField ADD_ITEMS = new ParseField("add_items"); + public static final ParseField REMOVE_ITEMS = new ParseField("remove_items"); + + private static final ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); + + static { + PARSER.declareString((request, filterId) -> request.filterId = filterId, MlFilter.ID); + PARSER.declareStringOrNull(Request::setDescription, MlFilter.DESCRIPTION); + PARSER.declareStringArray(Request::setAddItems, ADD_ITEMS); + PARSER.declareStringArray(Request::setRemoveItems, REMOVE_ITEMS); + } + + public static Request parseRequest(String filterId, XContentParser parser) { + Request request = PARSER.apply(parser, null); + if (request.filterId == null) { + request.filterId = filterId; + } else if (!Strings.isNullOrEmpty(filterId) && !filterId.equals(request.filterId)) { + // If we have both URI and body filter ID, they must be identical + throw new IllegalArgumentException(Messages.getMessage(Messages.INCONSISTENT_ID, MlFilter.ID.getPreferredName(), + request.filterId, filterId)); + } + return request; + } + + private String filterId; + @Nullable + private String description; + private SortedSet addItems = Collections.emptySortedSet(); + private SortedSet removeItems = Collections.emptySortedSet(); + + public Request() { + } + + public Request(String filterId) { + this.filterId = ExceptionsHelper.requireNonNull(filterId, MlFilter.ID.getPreferredName()); + } + + public String getFilterId() { + return filterId; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public SortedSet getAddItems() { + return addItems; + } + + public void setAddItems(Collection addItems) { + this.addItems = new TreeSet<>(ExceptionsHelper.requireNonNull(addItems, ADD_ITEMS.getPreferredName())); + } + + public SortedSet getRemoveItems() { + 
return removeItems; + } + + public void setRemoveItems(Collection removeItems) { + this.removeItems = new TreeSet<>(ExceptionsHelper.requireNonNull(removeItems, REMOVE_ITEMS.getPreferredName())); + } + + public boolean isNoop() { + return description == null && addItems.isEmpty() && removeItems.isEmpty(); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + filterId = in.readString(); + description = in.readOptionalString(); + addItems = new TreeSet<>(Arrays.asList(in.readStringArray())); + removeItems = new TreeSet<>(Arrays.asList(in.readStringArray())); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(filterId); + out.writeOptionalString(description); + out.writeStringArray(addItems.toArray(new String[addItems.size()])); + out.writeStringArray(removeItems.toArray(new String[removeItems.size()])); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(MlFilter.ID.getPreferredName(), filterId); + if (description != null) { + builder.field(MlFilter.DESCRIPTION.getPreferredName(), description); + } + if (addItems.isEmpty() == false) { + builder.field(ADD_ITEMS.getPreferredName(), addItems); + } + if (removeItems.isEmpty() == false) { + builder.field(REMOVE_ITEMS.getPreferredName(), removeItems); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(filterId, description, addItems, removeItems); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(filterId, other.filterId) + && Objects.equals(description, other.description) + && Objects.equals(addItems, other.addItems) + && Objects.equals(removeItems, other.removeItems); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + public RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new Request()); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java index b11dfd476515c..b45ce73f124fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java @@ -56,7 +56,7 @@ private static ObjectParser createParser(boolean ignoreUnknownFie private final String description; private final SortedSet items; - public MlFilter(String id, String description, SortedSet items) { + private MlFilter(String id, String description, SortedSet items) { this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null"); this.description = description; this.items = Objects.requireNonNull(items, ITEMS.getPreferredName() + " must not be null"); @@ -69,8 +69,7 @@ public MlFilter(StreamInput in) throws IOException { } else { description = null; } - items = new TreeSet<>(); - items.addAll(Arrays.asList(in.readStringArray())); + items = new TreeSet<>(Arrays.asList(in.readStringArray())); } @Override @@ -163,9 +162,13 @@ public Builder setDescription(String description) { return this; } + public Builder 
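
To make the add_items/remove_items semantics easier to follow outside the action framework, here is a minimal standalone sketch (plain Java; class and message names are illustrative, not the Elasticsearch types) of the rules the Request enforces: the body may omit the filter id and inherit it from the URL, the two must agree when both are present, item collections are normalized into sorted sets, and a request with no description and no items is a no-op.

import java.util.Collection;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

final class FilterUpdateRequestSketch {
    private final String filterId;
    private final String description;           // null means "leave the description unchanged"
    private final SortedSet<String> addItems;
    private final SortedSet<String> removeItems;

    FilterUpdateRequestSketch(String urlFilterId, String bodyFilterId, String description,
                              Collection<String> addItems, Collection<String> removeItems) {
        if (bodyFilterId == null) {
            this.filterId = urlFilterId;                                   // fall back to the URL value
        } else if (urlFilterId != null && !urlFilterId.isEmpty() && !urlFilterId.equals(bodyFilterId)) {
            // mirrors the INCONSISTENT_ID check: URL and body must name the same filter
            throw new IllegalArgumentException("Inconsistent filter_id; '" + bodyFilterId
                    + "' in the body differs from '" + urlFilterId + "' in the URL");
        } else {
            this.filterId = bodyFilterId;
        }
        this.description = description;
        this.addItems = new TreeSet<>(addItems);                           // sorted, duplicate-free
        this.removeItems = new TreeSet<>(removeItems);
    }

    boolean isNoop() {
        return description == null && addItems.isEmpty() && removeItems.isEmpty();
    }

    public static void main(String[] args) {
        FilterUpdateRequestSketch r = new FilterUpdateRequestSketch(
                "safe_domains", null, null, List.of("b.com", "a.com"), List.of());
        System.out.println(r.filterId + " add=" + r.addItems + " noop=" + r.isNoop());
        // prints: safe_domains add=[a.com, b.com] noop=false
    }
}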
setItems(SortedSet items) { + this.items = items; + return this; + } + public Builder setItems(List items) { - this.items = new TreeSet<>(); - this.items.addAll(items); + this.items = new TreeSet<>(items); return this; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 79d8f068d91f8..f0329051fed95 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -42,6 +42,8 @@ public final class Messages { public static final String DATAFEED_FREQUENCY_MUST_BE_MULTIPLE_OF_AGGREGATIONS_INTERVAL = "Datafeed frequency [{0}] must be a multiple of the aggregation interval [{1}]"; + public static final String FILTER_NOT_FOUND = "No filter with id [{0}] exists"; + public static final String INCONSISTENT_ID = "Inconsistent {0}; ''{1}'' specified in the body differs from ''{2}'' specified as a URL argument"; public static final String INVALID_ID = "Invalid {0}; ''{1}'' can contain lowercase alphanumeric (a-z and 0-9), hyphens or " + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java index 1588298918e22..03487500d8a8b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java @@ -19,9 +19,9 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -345,7 +345,7 @@ public static String v54DocumentId(String jobId, String snapshotId) { public static ModelSnapshot fromJson(BytesReference bytesReference) { try (InputStream stream = bytesReference.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(bytesReference)) + XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { return LENIENT_PARSER.apply(parser, null).build(); } catch (IOException e) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java index 150c539b1ae3b..d5b83d25ce315 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java @@ -38,6 +38,10 @@ public static ElasticsearchException serverError(String msg, Throwable cause) { return new ElasticsearchException(msg, cause); } + public static ElasticsearchStatusException conflictStatusException(String msg, Throwable cause, Object... 
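
The MlFilter change above makes the constructor private and routes all construction through the builder, which now accepts items either as an already-sorted set or as a plain list. A rough standalone sketch of that shape (plain Java, hypothetical FilterSketch class, not the real MlFilter):

import java.util.List;
import java.util.Objects;
import java.util.SortedSet;
import java.util.TreeSet;

final class FilterSketch {
    private final String id;
    private final String description;
    private final SortedSet<String> items;

    // Private constructor: every instance goes through the builder, as in the patch.
    private FilterSketch(String id, String description, SortedSet<String> items) {
        this.id = Objects.requireNonNull(id, "filter_id must not be null");
        this.description = description;
        this.items = Objects.requireNonNull(items, "items must not be null");
    }

    static Builder builder(String id) {
        return new Builder(id);
    }

    static final class Builder {
        private final String id;
        private String description;
        private SortedSet<String> items = new TreeSet<>();

        private Builder(String id) {
            this.id = id;
        }

        Builder setDescription(String description) {
            this.description = description;
            return this;
        }

        // Two overloads, as above: callers may hand over an already-sorted set...
        Builder setItems(SortedSet<String> items) {
            this.items = items;
            return this;
        }

        // ...or any list, which is copied into a TreeSet.
        Builder setItems(List<String> items) {
            this.items = new TreeSet<>(items);
            return this;
        }

        FilterSketch build() {
            return new FilterSketch(id, description, items);
        }
    }

    @Override
    public String toString() {
        return id + (description == null ? "" : " (" + description + ")") + " " + items;
    }

    public static void main(String[] args) {
        System.out.println(builder("safe_domains").setItems(List.of("b.com", "a.com")).build());
    }
}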
args) { + return new ElasticsearchStatusException(msg, RestStatus.CONFLICT, cause, args); + } + public static ElasticsearchStatusException conflictStatusException(String msg, Object... args) { return new ElasticsearchStatusException(msg, RestStatus.CONFLICT, args); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java new file mode 100644 index 0000000000000..f07eba7e90ebb --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction.Request; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +public class UpdateFilterActionRequestTests extends AbstractStreamableXContentTestCase { + + private String filterId = randomAlphaOfLength(20); + + @Override + protected Request createTestInstance() { + UpdateFilterAction.Request request = new UpdateFilterAction.Request(filterId); + if (randomBoolean()) { + request.setDescription(randomAlphaOfLength(20)); + } + if (randomBoolean()) { + request.setAddItems(generateRandomStrings()); + } + if (randomBoolean()) { + request.setRemoveItems(generateRandomStrings()); + } + return request; + } + + private static Collection generateRandomStrings() { + int size = randomIntBetween(0, 10); + List strings = new ArrayList<>(size); + for (int i = 0; i < size; ++i) { + strings.add(randomAlphaOfLength(20)); + } + return strings; + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected Request createBlankInstance() { + return new Request(); + } + + @Override + protected Request doParseInstance(XContentParser parser) { + return Request.parseRequest(filterId, parser); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java index 9ac6683f004c5..c8d8527dc0158 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; +import java.util.SortedSet; import java.util.TreeSet; import static org.hamcrest.Matchers.contains; @@ -43,7 +44,7 @@ public static MlFilter createRandom(String filterId) { for (int i = 0; i < size; i++) { items.add(randomAlphaOfLengthBetween(1, 20)); } - return new MlFilter(filterId, description, items); + return MlFilter.builder(filterId).setDescription(description).setItems(items).build(); } @Override @@ -57,13 +58,13 @@ protected MlFilter doParseInstance(XContentParser parser) { } public void testNullId() { - NullPointerException ex = expectThrows(NullPointerException.class, () -> new MlFilter(null, "", new TreeSet<>())); + 
NullPointerException ex = expectThrows(NullPointerException.class, () -> MlFilter.builder(null).build()); assertEquals(MlFilter.ID.getPreferredName() + " must not be null", ex.getMessage()); } public void testNullItems() { - NullPointerException ex = - expectThrows(NullPointerException.class, () -> new MlFilter(randomAlphaOfLengthBetween(1, 20), "", null)); + NullPointerException ex = expectThrows(NullPointerException.class, + () -> MlFilter.builder(randomAlphaOfLength(20)).setItems((SortedSet) null).build()); assertEquals(MlFilter.ITEMS.getPreferredName() + " must not be null", ex.getMessage()); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index a1714a8e3f5db..3d1011c47e2a8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -97,6 +97,7 @@ import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateDatafeedAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.UpdateProcessAction; @@ -148,6 +149,7 @@ import org.elasticsearch.xpack.ml.action.TransportStopDatafeedAction; import org.elasticsearch.xpack.ml.action.TransportUpdateCalendarJobAction; import org.elasticsearch.xpack.ml.action.TransportUpdateDatafeedAction; +import org.elasticsearch.xpack.ml.action.TransportUpdateFilterAction; import org.elasticsearch.xpack.ml.action.TransportUpdateJobAction; import org.elasticsearch.xpack.ml.action.TransportUpdateModelSnapshotAction; import org.elasticsearch.xpack.ml.action.TransportUpdateProcessAction; @@ -196,6 +198,7 @@ import org.elasticsearch.xpack.ml.rest.filter.RestDeleteFilterAction; import org.elasticsearch.xpack.ml.rest.filter.RestGetFiltersAction; import org.elasticsearch.xpack.ml.rest.filter.RestPutFilterAction; +import org.elasticsearch.xpack.ml.rest.filter.RestUpdateFilterAction; import org.elasticsearch.xpack.ml.rest.job.RestCloseJobAction; import org.elasticsearch.xpack.ml.rest.job.RestDeleteJobAction; import org.elasticsearch.xpack.ml.rest.job.RestFlushJobAction; @@ -460,6 +463,7 @@ public List getRestHandlers(Settings settings, RestController restC new RestOpenJobAction(settings, restController), new RestGetFiltersAction(settings, restController), new RestPutFilterAction(settings, restController), + new RestUpdateFilterAction(settings, restController), new RestDeleteFilterAction(settings, restController), new RestGetInfluencersAction(settings, restController), new RestGetRecordsAction(settings, restController), @@ -511,6 +515,7 @@ public List getRestHandlers(Settings settings, RestController restC new ActionHandler<>(OpenJobAction.INSTANCE, TransportOpenJobAction.class), new ActionHandler<>(GetFiltersAction.INSTANCE, TransportGetFiltersAction.class), new ActionHandler<>(PutFilterAction.INSTANCE, TransportPutFilterAction.class), + new ActionHandler<>(UpdateFilterAction.INSTANCE, TransportUpdateFilterAction.class), new ActionHandler<>(DeleteFilterAction.INSTANCE, TransportDeleteFilterAction.class), new ActionHandler<>(KillProcessAction.INSTANCE, TransportKillProcessAction.class), new 
ActionHandler<>(GetBucketsAction.INSTANCE, TransportGetBucketsAction.class), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index 1be7be4a5d2b3..c8cd7a0d63bb7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -21,8 +21,8 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -80,9 +80,8 @@ public void onResponse(GetResponse getDocResponse) { if (getDocResponse.isExists()) { BytesReference docSource = getDocResponse.getSourceAsBytesRef(); try (InputStream stream = docSource.streamInput(); - XContentParser parser = - XContentFactory.xContent(getDocResponse.getSourceAsBytes()) - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { MlFilter filter = MlFilter.LENIENT_PARSER.apply(parser, null).build(); responseBody = new QueryPage<>(Collections.singletonList(filter), 1, MlFilter.RESULTS_FIELD); @@ -122,7 +121,7 @@ public void onResponse(SearchResponse response) { for (SearchHit hit : response.getHits().getHits()) { BytesReference docSource = hit.getSourceRef(); try (InputStream stream = docSource.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(docSource)).createParser( + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser( NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { docs.add(MlFilter.LENIENT_PARSER.apply(parser, null).build()); } catch (IOException e) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index cb35daef8668c..011606f3c14ed 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -5,11 +5,12 @@ */ package org.elasticsearch.xpack.ml.action; +import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; @@ -19,12 +20,12 @@ import 
org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.job.JobManager; import java.io.IOException; import java.util.Collections; @@ -36,42 +37,44 @@ public class TransportPutFilterAction extends HandledTransportAction { private final Client client; - private final JobManager jobManager; @Inject - public TransportPutFilterAction(Settings settings, TransportService transportService, ActionFilters actionFilters, - Client client, JobManager jobManager) { + public TransportPutFilterAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Client client) { super(settings, PutFilterAction.NAME, transportService, actionFilters, - (Supplier) PutFilterAction.Request::new); + (Supplier) PutFilterAction.Request::new); this.client = client; - this.jobManager = jobManager; } @Override protected void doExecute(PutFilterAction.Request request, ActionListener listener) { MlFilter filter = request.getFilter(); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); + indexRequest.opType(DocWriteRequest.OpType.CREATE); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")); indexRequest.source(filter.toXContent(builder, params)); } catch (IOException e) { throw new IllegalStateException("Failed to serialise filter with id [" + filter.getId() + "]", e); } - BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); - bulkRequestBuilder.add(indexRequest); - bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), - new ActionListener() { + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, + new ActionListener() { @Override - public void onResponse(BulkResponse indexResponse) { - jobManager.updateProcessOnFilterChanged(filter); + public void onResponse(IndexResponse indexResponse) { listener.onResponse(new PutFilterAction.Response(filter)); } @Override public void onFailure(Exception e) { - listener.onFailure(ExceptionsHelper.serverError("Error putting filter with id [" + filter.getId() + "]", e)); + Exception reportedException; + if (e instanceof VersionConflictEngineException) { + reportedException = new ResourceAlreadyExistsException("A filter with id [" + filter.getId() + + "] already exists"); + } else { + reportedException = ExceptionsHelper.serverError("Error putting filter with id [" + filter.getId() + "]", e); + } + listener.onFailure(reportedException); } }); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java new file mode 100644 index 0000000000000..37f550fbb02ea --- /dev/null +++ 
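
TransportPutFilterAction now issues a single create-only index request and reports the resulting version conflict as "a filter with this id already exists". As a loose analogue (plain Java; an in-memory map stands in for the .ml-meta index and the exception type is illustrative), the create-if-absent behaviour can be sketched as:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class CreateOnlyStoreSketch {
    static final class AlreadyExistsException extends RuntimeException {
        AlreadyExistsException(String msg) { super(msg); }
    }

    private final Map<String, String> documents = new ConcurrentHashMap<>();

    // Analogue of indexRequest.opType(CREATE): the write succeeds only if no document
    // with this id exists yet; otherwise the low-level conflict is reported as
    // "already exists", like the VersionConflictEngineException mapping above.
    void createFilter(String filterId, String source) {
        String previous = documents.putIfAbsent(filterId, source);
        if (previous != null) {
            throw new AlreadyExistsException("A filter with id [" + filterId + "] already exists");
        }
    }

    public static void main(String[] args) {
        CreateOnlyStoreSketch store = new CreateOnlyStoreSketch();
        store.createFilter("safe_domains", "{\"items\":[\"a.com\"]}");
        try {
            store.createFilter("safe_domains", "{\"items\":[\"b.com\"]}");
        } catch (AlreadyExistsException e) {
            System.out.println(e.getMessage());   // A filter with id [safe_domains] already exists
        }
    }
}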
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java @@ -0,0 +1,173 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.action.PutFilterAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.job.JobManager; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Collections; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +public class TransportUpdateFilterAction extends HandledTransportAction { + + private final Client client; + private final JobManager jobManager; + + @Inject + public TransportUpdateFilterAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Client client, + JobManager jobManager) { + super(settings, UpdateFilterAction.NAME, transportService, actionFilters, + (Supplier) UpdateFilterAction.Request::new); + this.client = client; + this.jobManager = jobManager; + } + + @Override + protected void doExecute(UpdateFilterAction.Request request, ActionListener listener) { + ActionListener filterListener = ActionListener.wrap(filterWithVersion -> { + updateFilter(filterWithVersion, request, listener); + }, listener::onFailure); + + getFilterWithVersion(request.getFilterId(), filterListener); + } + + private void updateFilter(FilterWithVersion filterWithVersion, UpdateFilterAction.Request request, + ActionListener listener) { + MlFilter filter = filterWithVersion.filter; + + if (request.isNoop()) { + 
listener.onResponse(new PutFilterAction.Response(filter)); + return; + } + + String description = request.getDescription() == null ? filter.getDescription() : request.getDescription(); + SortedSet items = new TreeSet<>(filter.getItems()); + items.addAll(request.getAddItems()); + + // Check if removed items are present to avoid typos + for (String toRemove : request.getRemoveItems()) { + boolean wasPresent = items.remove(toRemove); + if (wasPresent == false) { + listener.onFailure(ExceptionsHelper.badRequestException("Cannot remove item [" + toRemove + + "] as it is not present in filter [" + filter.getId() + "]")); + return; + } + } + + MlFilter updatedFilter = MlFilter.builder(filter.getId()).setDescription(description).setItems(items).build(); + indexUpdatedFilter(updatedFilter, filterWithVersion.version, request, listener); + } + + private void indexUpdatedFilter(MlFilter filter, long version, UpdateFilterAction.Request request, + ActionListener listener) { + IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); + indexRequest.version(version); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")); + indexRequest.source(filter.toXContent(builder, params)); + } catch (IOException e) { + throw new IllegalStateException("Failed to serialise filter with id [" + filter.getId() + "]", e); + } + + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + jobManager.notifyFilterChanged(filter, request.getAddItems(), request.getRemoveItems()); + listener.onResponse(new PutFilterAction.Response(filter)); + } + + @Override + public void onFailure(Exception e) { + Exception reportedException; + if (e instanceof VersionConflictEngineException) { + reportedException = ExceptionsHelper.conflictStatusException("Error updating filter with id [" + filter.getId() + + "] because it was modified while the update was in progress", e); + } else { + reportedException = ExceptionsHelper.serverError("Error updating filter with id [" + filter.getId() + "]", e); + } + listener.onFailure(reportedException); + } + }); + } + + private void getFilterWithVersion(String filterId, ActionListener listener) { + GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId)); + executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener() { + @Override + public void onResponse(GetResponse getDocResponse) { + try { + if (getDocResponse.isExists()) { + BytesReference docSource = getDocResponse.getSourceAsBytesRef(); + try (InputStream stream = docSource.streamInput(); + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + MlFilter filter = MlFilter.LENIENT_PARSER.apply(parser, null).build(); + listener.onResponse(new FilterWithVersion(filter, getDocResponse.getVersion())); + } + } else { + this.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.FILTER_NOT_FOUND, filterId))); + } + } catch (Exception e) { + this.onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private static class 
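
TransportUpdateFilterAction reads the current filter together with its document version, applies add_items/remove_items (rejecting removals of items that are not present), and indexes the result with that version so a concurrent modification surfaces as a conflict. A compact standalone sketch of that optimistic-concurrency flow (plain Java, in-memory versioned store; names are illustrative):

import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;

final class VersionedFilterStoreSketch {
    static final class Versioned {
        final SortedSet<String> items;
        final long version;
        Versioned(SortedSet<String> items, long version) { this.items = items; this.version = version; }
    }

    private final Map<String, Versioned> store = new ConcurrentHashMap<>();

    void put(String id, SortedSet<String> items) {
        store.put(id, new Versioned(items, 1));
    }

    Versioned get(String id) {
        Versioned v = store.get(id);
        if (v == null) {
            throw new IllegalArgumentException("No filter with id [" + id + "] exists");
        }
        return v;
    }

    // Analogue of indexRequest.version(expectedVersion): the write only succeeds if the
    // document has not changed since it was read; otherwise report a conflict.
    void writeIfUnchanged(String id, SortedSet<String> items, long expectedVersion) {
        store.compute(id, (key, current) -> {
            if (current == null || current.version != expectedVersion) {
                throw new IllegalStateException("Error updating filter with id [" + id
                        + "] because it was modified while the update was in progress");
            }
            return new Versioned(items, expectedVersion + 1);
        });
    }

    // Mirrors updateFilter above: start from the existing items, apply add_items,
    // then remove_items, rejecting removals of items that are not present.
    void applyUpdate(String id, SortedSet<String> addItems, SortedSet<String> removeItems) {
        Versioned current = get(id);
        SortedSet<String> items = new TreeSet<>(current.items);
        items.addAll(addItems);
        for (String toRemove : removeItems) {
            if (items.remove(toRemove) == false) {
                throw new IllegalArgumentException("Cannot remove item [" + toRemove
                        + "] as it is not present in filter [" + id + "]");
            }
        }
        writeIfUnchanged(id, items, current.version);
    }

    public static void main(String[] args) {
        VersionedFilterStoreSketch sketch = new VersionedFilterStoreSketch();
        sketch.put("safe_domains", new TreeSet<>(java.util.List.of("a.com", "b.com")));
        sketch.applyUpdate("safe_domains",
                new TreeSet<>(java.util.List.of("c.com")), new TreeSet<>(java.util.List.of("a.com")));
        System.out.println(sketch.get("safe_domains").items);   // [b.com, c.com]
    }
}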
FilterWithVersion { + + private final MlFilter filter; + private final long version; + + private FilterWithVersion(MlFilter filter, long version) { + this.filter = filter; + this.version = version; + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index fe6deea55e3aa..c3d31ae10e925 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -403,26 +403,55 @@ private ClusterState updateClusterState(Job job, boolean overwrite, ClusterState return buildNewClusterState(currentState, builder); } - public void updateProcessOnFilterChanged(MlFilter filter) { + public void notifyFilterChanged(MlFilter filter, Set addedItems, Set removedItems) { + if (addedItems.isEmpty() && removedItems.isEmpty()) { + return; + } + ClusterState clusterState = clusterService.state(); QueryPage jobs = expandJobs("*", true, clusterService.state()); for (Job job : jobs.results()) { - if (isJobOpen(clusterState, job.getId())) { - Set jobFilters = job.getAnalysisConfig().extractReferencedFilters(); - if (jobFilters.contains(filter.getId())) { - updateJobProcessNotifier.submitJobUpdate(UpdateParams.filterUpdate(job.getId(), filter), ActionListener.wrap( - isUpdated -> { - if (isUpdated) { - auditor.info(job.getId(), - Messages.getMessage(Messages.JOB_AUDIT_FILTER_UPDATED_ON_PROCESS, filter.getId())); - } - }, e -> {} - )); + Set jobFilters = job.getAnalysisConfig().extractReferencedFilters(); + if (jobFilters.contains(filter.getId())) { + if (isJobOpen(clusterState, job.getId())) { + updateJobProcessNotifier.submitJobUpdate(UpdateParams.filterUpdate(job.getId(), filter), + ActionListener.wrap(isUpdated -> { + auditFilterChanges(job.getId(), filter.getId(), addedItems, removedItems); + }, e -> {})); + } else { + auditFilterChanges(job.getId(), filter.getId(), addedItems, removedItems); } } } } + private void auditFilterChanges(String jobId, String filterId, Set addedItems, Set removedItems) { + StringBuilder auditMsg = new StringBuilder("Filter ["); + auditMsg.append(filterId); + auditMsg.append("] has been modified; "); + + if (addedItems.isEmpty() == false) { + auditMsg.append("added items: "); + appendCommaSeparatedSet(addedItems, auditMsg); + if (removedItems.isEmpty() == false) { + auditMsg.append(", "); + } + } + + if (removedItems.isEmpty() == false) { + auditMsg.append("removed items: "); + appendCommaSeparatedSet(removedItems, auditMsg); + } + + auditor.info(jobId, auditMsg.toString()); + } + + private static void appendCommaSeparatedSet(Set items, StringBuilder sb) { + sb.append("["); + Strings.collectionToDelimitedString(items, ", ", "'", "'", sb); + sb.append("]"); + } + public void updateProcessOnCalendarChanged(List calendarJobIds) { ClusterState clusterState = clusterService.state(); MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterState); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java index 17b4b8edadfa2..53526e2a4753d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java @@ -11,8 +11,8 @@ import 
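
The audit text built by auditFilterChanges above has a fixed shape, "Filter [id] has been modified; added items: [...], removed items: [...]", with each item single-quoted. A small standalone sketch of that formatting, using java.util streams in place of the Strings helper:

import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;

final class FilterAuditMessageSketch {

    static String auditMessage(String filterId, Set<String> addedItems, Set<String> removedItems) {
        StringBuilder msg = new StringBuilder("Filter [").append(filterId).append("] has been modified; ");
        if (!addedItems.isEmpty()) {
            msg.append("added items: ").append(quoted(addedItems));
            if (!removedItems.isEmpty()) {
                msg.append(", ");
            }
        }
        if (!removedItems.isEmpty()) {
            msg.append("removed items: ").append(quoted(removedItems));
        }
        return msg.toString();
    }

    // ['a', 'b'] -- each item single-quoted, comma separated, wrapped in brackets
    private static String quoted(Set<String> items) {
        return items.stream().map(i -> "'" + i + "'").collect(Collectors.joining(", ", "[", "]"));
    }

    public static void main(String[] args) {
        System.out.println(auditMessage("foo_filter",
                new TreeSet<>(Set.of("item 1", "item 2")), new TreeSet<>(Set.of("item 3"))));
        // Filter [foo_filter] has been modified; added items: ['item 1', 'item 2'], removed items: ['item 3']
    }
}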
org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.Result; @@ -30,7 +30,7 @@ class BatchedBucketsIterator extends BatchedResultsIterator { protected Result map(SearchHit hit) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY, + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { Bucket bucket = Bucket.LENIENT_PARSER.apply(parser, null); return new Result<>(hit.getIndex(), bucket); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java index d084325350fc5..fe8bd3aaa3af7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.Result; @@ -29,7 +29,7 @@ class BatchedInfluencersIterator extends BatchedResultsIterator { protected Result map(SearchHit hit) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY, + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { Influencer influencer = Influencer.LENIENT_PARSER.apply(parser, null); return new Result<>(hit.getIndex(), influencer); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java index c0940dfd5aad1..22c107f771ba5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import 
org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Result; @@ -30,7 +30,7 @@ class BatchedRecordsIterator extends BatchedResultsIterator { protected Result map(SearchHit hit) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY, + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)){ AnomalyRecord record = AnomalyRecord.LENIENT_PARSER.apply(parser, null); return new Result<>(hit.getIndex(), record); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java index 9db1877df1850..578ddd1efc78a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java @@ -50,7 +50,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; @@ -477,7 +476,7 @@ private T parseSearchHit(SearchHit hit, BiFunction Consumer errorHandler) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)) + XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { return objectParser.apply(parser, null); } catch (IOException e) { @@ -528,7 +527,7 @@ public void buckets(String jobId, BucketsQueryBuilder query, Consumer modelPlot(String jobId, int from, int size) { for (SearchHit hit : searchResponse.getHits().getHits()) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)) + XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { ModelPlot modelPlot = ModelPlot.LENIENT_PARSER.apply(parser, null); results.add(modelPlot); @@ -1232,10 +1231,8 @@ public void onResponse(GetResponse getDocResponse) { BytesReference docSource = getDocResponse.getSourceAsBytesRef(); try (InputStream stream = docSource.streamInput(); - XContentParser parser = - XContentFactory.xContent(XContentHelper.xContentType(docSource)) - .createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, stream)) { + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { Calendar calendar = Calendar.LENIENT_PARSER.apply(parser, null).build(); listener.onResponse(calendar); } diff --git 
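
Several hunks above drop content-type sniffing (XContentHelper.xContentType) and parse stored _source directly as JSON via XContentType.JSON. Outside of Elasticsearch the same idea is simply "hand the stored bytes to a JSON parser"; a tiny analogue using Jackson (a stand-in parser assumed for this sketch, not a library used by the patch):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.nio.charset.StandardCharsets;

final class ParseStoredSourceSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // The stored document source is assumed to be JSON, so no content-type detection is needed.
    static JsonNode parse(byte[] source) throws java.io.IOException {
        return MAPPER.readTree(source);
    }

    public static void main(String[] args) throws Exception {
        byte[] source = "{\"filter_id\":\"safe_domains\",\"items\":[\"a.com\",\"b.com\"]}"
                .getBytes(StandardCharsets.UTF_8);
        System.out.println(parse(source).get("items"));   // ["a.com","b.com"]
    }
}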
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java new file mode 100644 index 0000000000000..80acf3d7e4e35 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.rest.filter; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.ml.MachineLearning; + +import java.io.IOException; + +public class RestUpdateFilterAction extends BaseRestHandler { + + public RestUpdateFilterAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.POST, + MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}/_update", this); + } + + @Override + public String getName() { + return "xpack_ml_update_filter_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String filterId = restRequest.param(MlFilter.ID.getPreferredName()); + XContentParser parser = restRequest.contentOrSourceParamParser(); + UpdateFilterAction.Request putFilterRequest = UpdateFilterAction.Request.parseRequest(filterId, parser); + return channel -> client.execute(UpdateFilterAction.INSTANCE, putFilterRequest, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java index 42b0a56f49a82..cf925963c198a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java @@ -41,12 +41,14 @@ import org.junit.Before; import org.mockito.ArgumentCaptor; import org.mockito.Matchers; +import org.mockito.Mockito; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; +import java.util.TreeSet; import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; import static org.elasticsearch.xpack.ml.action.TransportOpenJobActionTests.addJobTask; @@ -174,7 +176,16 @@ public void onFailure(Exception e) { }); } - public void testUpdateProcessOnFilterChanged() { + public void testNotifyFilterChangedGivenNoop() { + MlFilter filter = MlFilter.builder("my_filter").build(); + JobManager jobManager = createJobManager(); + + jobManager.notifyFilterChanged(filter, Collections.emptySet(), Collections.emptySet()); + + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); + } + + public void testNotifyFilterChanged() { Detector.Builder 
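
RestUpdateFilterAction and the accompanying REST spec expose the update as POST /_xpack/ml/filters/{filter_id}/_update. Purely as a usage illustration (host, port and the filter id are assumptions of this sketch, and a real cluster may require authentication), a plain-Java client call could look like:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

final class UpdateFilterHttpExample {
    public static void main(String[] args) throws Exception {
        String body = "{ \"description\": \"new description\","
                + " \"add_items\": [\"c\", \"d\"], \"remove_items\": [\"a\"] }";

        // Endpoint path taken from the REST spec added in this patch; host and port are assumed.
        URL url = new URL("http://localhost:9200/_xpack/ml/filters/test_update_filter/_update");
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("POST");
        connection.setDoOutput(true);
        connection.setRequestProperty("Content-Type", "application/json");
        try (OutputStream out = connection.getOutputStream()) {
            out.write(body.getBytes(StandardCharsets.UTF_8));
        }
        System.out.println("HTTP " + connection.getResponseCode());
        connection.disconnect();
    }
}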
detectorReferencingFilter = new Detector.Builder("count", null); detectorReferencingFilter.setByFieldName("foo"); DetectionRule filterRule = new DetectionRule.Builder(RuleScope.builder().exclude("foo", "foo_filter")).build(); @@ -208,11 +219,18 @@ public void testUpdateProcessOnFilterChanged() { .build(); when(clusterService.state()).thenReturn(clusterState); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + listener.onResponse(true); + return null; + }).when(updateJobProcessNotifier).submitJobUpdate(any(), any()); + JobManager jobManager = createJobManager(); MlFilter filter = MlFilter.builder("foo_filter").setItems("a", "b").build(); - jobManager.updateProcessOnFilterChanged(filter); + jobManager.notifyFilterChanged(filter, new TreeSet<>(Arrays.asList("item 1", "item 2")), + new TreeSet<>(Collections.singletonList("item 3"))); ArgumentCaptor updateParamsCaptor = ArgumentCaptor.forClass(UpdateParams.class); verify(updateJobProcessNotifier, times(2)).submitJobUpdate(updateParamsCaptor.capture(), any(ActionListener.class)); @@ -223,6 +241,74 @@ public void testUpdateProcessOnFilterChanged() { assertThat(capturedUpdateParams.get(0).getFilter(), equalTo(filter)); assertThat(capturedUpdateParams.get(1).getJobId(), equalTo(jobReferencingFilter2.getId())); assertThat(capturedUpdateParams.get(1).getFilter(), equalTo(filter)); + + verify(auditor).info(jobReferencingFilter1.getId(), "Filter [foo_filter] has been modified; added items: " + + "['item 1', 'item 2'], removed items: ['item 3']"); + verify(auditor).info(jobReferencingFilter2.getId(), "Filter [foo_filter] has been modified; added items: " + + "['item 1', 'item 2'], removed items: ['item 3']"); + verify(auditor).info(jobReferencingFilter3.getId(), "Filter [foo_filter] has been modified; added items: " + + "['item 1', 'item 2'], removed items: ['item 3']"); + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); + } + + public void testNotifyFilterChangedGivenOnlyAddedItems() { + Detector.Builder detectorReferencingFilter = new Detector.Builder("count", null); + detectorReferencingFilter.setByFieldName("foo"); + DetectionRule filterRule = new DetectionRule.Builder(RuleScope.builder().exclude("foo", "foo_filter")).build(); + detectorReferencingFilter.setRules(Collections.singletonList(filterRule)); + AnalysisConfig.Builder filterAnalysisConfig = new AnalysisConfig.Builder(Collections.singletonList( + detectorReferencingFilter.build())); + + Job.Builder jobReferencingFilter = buildJobBuilder("job-referencing-filter"); + jobReferencingFilter.setAnalysisConfig(filterAnalysisConfig); + + MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); + mlMetadata.putJob(jobReferencingFilter.build(), false); + + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder() + .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .build(); + when(clusterService.state()).thenReturn(clusterState); + + JobManager jobManager = createJobManager(); + + MlFilter filter = MlFilter.builder("foo_filter").build(); + + jobManager.notifyFilterChanged(filter, new TreeSet<>(Arrays.asList("a", "b")), Collections.emptySet()); + + verify(auditor).info(jobReferencingFilter.getId(), "Filter [foo_filter] has been modified; added items: ['a', 'b']"); + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); + } + + public void testNotifyFilterChangedGivenOnlyRemovedItems() { + Detector.Builder detectorReferencingFilter = new 
Detector.Builder("count", null); + detectorReferencingFilter.setByFieldName("foo"); + DetectionRule filterRule = new DetectionRule.Builder(RuleScope.builder().exclude("foo", "foo_filter")).build(); + detectorReferencingFilter.setRules(Collections.singletonList(filterRule)); + AnalysisConfig.Builder filterAnalysisConfig = new AnalysisConfig.Builder(Collections.singletonList( + detectorReferencingFilter.build())); + + Job.Builder jobReferencingFilter = buildJobBuilder("job-referencing-filter"); + jobReferencingFilter.setAnalysisConfig(filterAnalysisConfig); + + MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); + mlMetadata.putJob(jobReferencingFilter.build(), false); + + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder() + .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .build(); + when(clusterService.state()).thenReturn(clusterState); + + JobManager jobManager = createJobManager(); + + MlFilter filter = MlFilter.builder("foo_filter").build(); + + jobManager.notifyFilterChanged(filter, Collections.emptySet(), new TreeSet<>(Arrays.asList("a", "b"))); + + verify(auditor).info(jobReferencingFilter.getId(), "Filter [foo_filter] has been modified; removed items: ['a', 'b']"); + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); } public void testUpdateProcessOnCalendarChanged() { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json new file mode 100644 index 0000000000000..06aceea4c1240 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json @@ -0,0 +1,20 @@ +{ + "xpack.ml.update_filter": { + "methods": [ "POST" ], + "url": { + "path": "/_xpack/ml/filters/{filter_id}/_update", + "paths": [ "/_xpack/ml/filters/{filter_id}/_update" ], + "parts": { + "filter_id": { + "type": "string", + "required": true, + "description": "The ID of the filter to update" + } + } + }, + "body": { + "description" : "The filter update", + "required" : true + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml index ffbbf4d95bdda..c206a08e6ca91 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml @@ -30,6 +30,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-custom-all-test-1 type: doc @@ -56,6 +57,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
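
The doAnswer stubbing in the test above makes the mocked notifier invoke its ActionListener, so the code under test sees a successful process update. The same pattern in self-contained form (Mockito assumed on the classpath; the Notifier interface is a simplified stand-in for UpdateJobProcessNotifier):

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.util.function.Consumer;

final class DoAnswerCallbackSketch {

    // A minimal stand-in for the notifier: it reports success asynchronously
    // through a callback instead of returning a value.
    interface Notifier {
        void submitJobUpdate(String update, Consumer<Boolean> listener);
    }

    public static void main(String[] args) {
        Notifier notifier = mock(Notifier.class);

        // As in the test: make the mock invoke the callback with "true" so callers
        // behave as if the process update succeeded.
        doAnswer(invocation -> {
            Consumer<Boolean> listener = invocation.getArgument(1);
            listener.accept(true);
            return null;
        }).when(notifier).submitJobUpdate(any(), any());

        // Code under test would normally live elsewhere; here we drive the mock directly.
        notifier.submitJobUpdate("filter update", ok -> System.out.println("updated: " + ok));

        verify(notifier).submitJobUpdate(any(), any());
    }
}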
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-custom-all-test-2 type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml index 1a587c47fd573..c13b2473cc785 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml @@ -34,6 +34,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-delete-model-snapshot type: doc @@ -76,6 +77,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-delete-model-snapshot type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml index 4c184d34c995e..d787e07b8c28c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml @@ -4,6 +4,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-meta type: doc @@ -112,25 +113,25 @@ setup: "Test create filter api": - do: xpack.ml.put_filter: - filter_id: filter-foo2 + filter_id: new-filter body: > { "description": "A newly created filter", "items": ["abc", "xyz"] } - - match: { filter_id: filter-foo2 } + - match: { filter_id: new-filter } - match: { description: "A newly created filter" } - match: { items: ["abc", "xyz"]} - do: xpack.ml.get_filters: - filter_id: "filter-foo2" + filter_id: "new-filter" - match: { count: 1 } - match: filters.0: - filter_id: "filter-foo2" + filter_id: "new-filter" description: "A newly created filter" items: ["abc", "xyz"] @@ -146,6 +147,65 @@ setup: "items": ["abc", "xyz"] } +--- +"Test update filter given no filter matches filter_id": + - do: + catch: missing + xpack.ml.update_filter: + filter_id: "missing_filter" + body: > + { + } + +--- +"Test update filter": + - do: + xpack.ml.put_filter: + filter_id: "test_update_filter" + body: > + { + "description": "old description", + "items": ["a", "b"] + } + - match: { filter_id: test_update_filter } + + - do: + xpack.ml.update_filter: + filter_id: "test_update_filter" + body: > + { + "description": "new description", + "add_items": ["c", "d"], + "remove_items": ["a"] + } + - match: { filter_id: test_update_filter } + - match: { description: "new description" } + - match: { items: ["b", "c", "d"] } + + - do: + xpack.ml.get_filters: + filter_id: "test_update_filter" + - match: + filters.0: + filter_id: "test_update_filter" + description: "new description" + items: ["b", "c", "d"] + + - do: + xpack.ml.delete_filter: + filter_id: test_update_filter + +--- +"Test update filter given remove item is not present": + - do: + catch: /Cannot remove item \[not present item\] as it is not present in filter \[filter-foo\]/ + xpack.ml.update_filter: + filter_id: "filter-foo" + body: > + { + "remove_items": ["not present item"] + } + --- "Test delete in-use filter": - do: diff --git 
a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml index 57cc80ae2fb73..e411251363b71 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-model-snapshots type: doc @@ -33,6 +34,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-state type: doc @@ -44,6 +46,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-model-snapshots type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml index c13ae86e06f50..6a60bbb96da6f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml @@ -556,6 +556,8 @@ - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json + index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml index df505176ae739..3b08753e20913 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml @@ -419,6 +419,8 @@ - match: { job_id: "jobs-crud-model-memory-limit-decrease" } - do: + headers: + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -929,6 +931,8 @@ "Test cannot create job with existing result document": - do: + headers: + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml index 2a7a7970e5db2..125f8cbf7f8d2 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-buckets type: doc @@ -34,6 +35,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-buckets type: doc @@ -50,6 +52,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-buckets type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml index 565f1612f89a2..307a1d0a80d7e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-categories type: doc @@ -26,6 +27,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-categories type: doc @@ -34,6 +36,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-unrelated type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml index 50f0cfc6816bc..9b875fb1afd86 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-influencers-test type: doc @@ -36,6 +37,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-influencers-test type: doc @@ -55,6 +57,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-influencers-test type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml index 75f35f311177c..249ff7c72d7ad 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml @@ -59,6 +59,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -75,6 +76,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -91,6 +93,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -123,6 +126,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -139,6 +143,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -155,6 +160,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -171,6 +177,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -187,6 +194,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -203,6 +211,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml index b5dae2045f440..513e1fb875774 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-records type: doc @@ -34,6 +35,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-records type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml index 61bcf63e39869..b841c8c23069f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml @@ -226,6 +226,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -250,6 +251,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml index 42fca7b81a036..0f01613203704 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml @@ -19,6 +19,7 @@ - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml index a66c0da12d0a9..ce638fdceaa19 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml @@ -34,6 +34,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -61,6 +62,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -88,6 +90,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -103,6 +106,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -118,6 +122,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -133,6 +138,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -148,6 +154,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -163,6 +170,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -180,6 +188,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml index 6a1d6e117e924..9966ae668c08f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-update-model-snapshot type: doc @@ -67,6 +68,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-update-model-snapshot type: doc diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java index fbda8ad716b2c..7f018f967fbfd 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.DetectionRule; @@ -34,6 +35,7 @@ import java.util.Map; import java.util.Set; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isOneOf; @@ -177,10 +179,12 @@ public void testScope() throws Exception { assertThat(records.get(0).getOverFieldValue(), equalTo("333.333.333.333")); // Now let's update the filter - MlFilter updatedFilter = MlFilter.builder(safeIps.getId()).setItems("333.333.333.333").build(); - assertThat(putMlFilter(updatedFilter).getFilter(), equalTo(updatedFilter)); + UpdateFilterAction.Request updateFilterRequest = new UpdateFilterAction.Request(safeIps.getId()); + updateFilterRequest.setRemoveItems(safeIps.getItems()); + updateFilterRequest.setAddItems(Collections.singletonList("333.333.333.333")); + client().execute(UpdateFilterAction.INSTANCE, updateFilterRequest).get(); - // Wait until the notification that the process was updated is indexed + // Wait until the notification that the filter was updated is indexed assertBusy(() -> { SearchResponse searchResponse = client().prepareSearch(".ml-notifications") .setSize(1) @@ -191,7 +195,7 @@ public void testScope() throws Exception { ).get(); SearchHit[] hits = searchResponse.getHits().getHits(); assertThat(hits.length, equalTo(1)); - 
assertThat(hits[0].getSourceAsMap().get("message"), equalTo("Updated filter [safe_ips] in running process")); + assertThat((String) hits[0].getSourceAsMap().get("message"), containsString("Filter [safe_ips] has been modified")); }); long secondAnomalyTime = timestamp; diff --git a/x-pack/qa/smoke-test-ml-with-security/build.gradle b/x-pack/qa/smoke-test-ml-with-security/build.gradle index ebe55c2b7ef29..58e5eca3600f6 100644 --- a/x-pack/qa/smoke-test-ml-with-security/build.gradle +++ b/x-pack/qa/smoke-test-ml-with-security/build.gradle @@ -42,6 +42,7 @@ integTestRunner { 'ml/filter_crud/Test get filter API with bad ID', 'ml/filter_crud/Test invalid param combinations', 'ml/filter_crud/Test non-existing filter', + 'ml/filter_crud/Test update filter given remove item is not present', 'ml/get_datafeed_stats/Test get datafeed stats given missing datafeed_id', 'ml/get_datafeeds/Test get datafeed given missing datafeed_id', 'ml/jobs_crud/Test cannot create job with existing categorizer state document', From f023e95ae0ce4828085631b342d518c0e572a8aa Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 Jun 2018 16:17:17 +0200 Subject: [PATCH 23/34] Upgrade to Lucene 7.4.0. (#31529) This moves Elasticsearch from a recent 7.4.0 snapshot to the GA release. --- buildSrc/version.properties | 2 +- .../lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 | 1 + .../lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 | 1 + ...lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 | 1 + .../lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 | 1 + ...lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 | 1 + .../lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 | 1 + .../lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 | 1 + ...cene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 | 1 - .../licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 | 1 + .../lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 | 1 + .../lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 | 1 + server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-core-7.4.0.jar.sha1 | 1 + .../licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-grouping-7.4.0.jar.sha1 | 1 + .../lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-highlighter-7.4.0.jar.sha1 | 1 + server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-join-7.4.0.jar.sha1 | 1 + .../licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-memory-7.4.0.jar.sha1 | 1 + server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-misc-7.4.0.jar.sha1 | 1 + .../licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-queries-7.4.0.jar.sha1 | 1 + .../lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 | 1 - 
server/licenses/lucene-queryparser-7.4.0.jar.sha1 | 1 + .../licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-sandbox-7.4.0.jar.sha1 | 1 + .../licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-spatial-7.4.0.jar.sha1 | 1 + .../lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 | 1 + .../lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-spatial3d-7.4.0.jar.sha1 | 1 + .../licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 | 1 - server/licenses/lucene-suggest-7.4.0.jar.sha1 | 1 + .../licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 | 1 - x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 | 1 + 49 files changed, 25 insertions(+), 25 deletions(-) delete mode 100644 modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-core-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-grouping-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-join-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-memory-7.4.0.jar.sha1 
delete mode 100644 server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-misc-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-queries-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-spatial-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-7.4.0.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 server/licenses/lucene-suggest-7.4.0.jar.sha1 delete mode 100644 x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 create mode 100644 x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index d89ffa78ed852..17e5cb5ff01f5 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0-alpha1 -lucene = 7.4.0-snapshot-518d303506 +lucene = 7.4.0 # optional dependencies spatial4j = 0.7 diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 2e666a2d566b0..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a57659a275921d8ab3f7ec580e9bf713ce6143b1 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..2b14a61f264fa --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 @@ -0,0 +1 @@ +9f0a326f7ec1671ffb07f95b27f1a5812b7dc1c3 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 03f1b7d27aed5..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b91a260d8d12ee4b3302a63059c73a34de0ce146 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..b5291b30c7de8 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 @@ -0,0 +1 @@ +394e811e9d9bf0b9fba837f7ceca9e8f3e39d1c2 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 
b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 9a5c6669009eb..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cc1ca9bd9e2c162dd1da8c2e7111913fd8033e48 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..49f55bea5e687 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 @@ -0,0 +1 @@ +5cd56acfa16ba20e19b5d21d90b510eada841431 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index cbf4f78c31999..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2fa3662a10a9e085b1c7b87293d727422cbe6224 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..c4b61b763b483 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 @@ -0,0 +1 @@ +db7b56f4cf533ad9022d2312c5ee48331edccca3 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index bd5bf428b6d44..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -60aa50c11857e6739e68936cb45102562b2c46b4 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..779cac9761242 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 @@ -0,0 +1 @@ +e8dba4d28a595eab2e8fb6095d1ac5f2d3872144 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index a73900802ace1..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4586368007785a3be26db4b9ce404ffb8c76f350 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..cf5c49a2759c9 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 @@ -0,0 +1 @@ +1243c771ee824c46a3d66ae3e4256d919fc06fbe \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 
bf0a50f7154e5..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9c6d030ab2c148df7a6ba73a774ef4b8c720a6cb \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..830b9ccf9cbe2 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 @@ -0,0 +1 @@ +c783794b0d20d8dc1285edc7701f386b1f0e2fb8 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index ba6ceb2aed9d8..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8275bf8df2644d5fcec2963cf237d14b6e00fefe \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..a96e05f5e3b87 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 @@ -0,0 +1 @@ +9438efa504a89afb6cb4c66448c257f865164d23 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 4c0db7a735c8d..0000000000000 --- a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -557d62d2b13d3dcb1810a1633e22625e42425425 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..928cc6dea046c --- /dev/null +++ b/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 @@ -0,0 +1 @@ +e1afb580df500626a1c695e0fc9a7e8a8f58bcac \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 0579316096a72..0000000000000 --- a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3755ad4c98b49fe5055b32358e3071727177c03 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..a94663119e7d6 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 @@ -0,0 +1 @@ +a6ad941ef1fdad48673ed511631b7e48a9456bf7 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 134072bc13701..0000000000000 --- a/server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c1bbf611535f0b0fd0ba14e8da67c8d645b95244 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0.jar.sha1 b/server/licenses/lucene-core-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..80ba6c76aa301 --- /dev/null +++ 
b/server/licenses/lucene-core-7.4.0.jar.sha1 @@ -0,0 +1 @@ +730d9ac80436c8cbc0b2a8a749259be536b97316 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 8a3327cc8a227..0000000000000 --- a/server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b62ebd53bbefb2f59cd246157a6768cae8a5a3a1 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0.jar.sha1 b/server/licenses/lucene-grouping-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..5b781d26829ed --- /dev/null +++ b/server/licenses/lucene-grouping-7.4.0.jar.sha1 @@ -0,0 +1 @@ +56f99858a4421a517b52da36a222debcccab80c6 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 75fb5a7755639..0000000000000 --- a/server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cba0fd4ccb98db8a72287a95d6b653e455f9eeb3 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..e1ebb95fe1b05 --- /dev/null +++ b/server/licenses/lucene-highlighter-7.4.0.jar.sha1 @@ -0,0 +1 @@ +5266b45d7f049662817d739881765904621876d0 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 01e0197bc1713..0000000000000 --- a/server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5127ed0b7516f8b28d84e837df4f33c67e361f6c \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0.jar.sha1 b/server/licenses/lucene-join-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..ff81c33c3f860 --- /dev/null +++ b/server/licenses/lucene-join-7.4.0.jar.sha1 @@ -0,0 +1 @@ +c77154d18c4944ceb6ce0741060632f57d623fdc \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 3d6069f2a5c8b..0000000000000 --- a/server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -45c7b13aae1104f9f5f0fca0606e5741309c8d74 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0.jar.sha1 b/server/licenses/lucene-memory-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..7c0117dff6b68 --- /dev/null +++ b/server/licenses/lucene-memory-7.4.0.jar.sha1 @@ -0,0 +1 @@ +186ff981feec1bdbf1a6236e786ec171b5fbe3e0 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index a74be59aea39c..0000000000000 --- a/server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2540c4b5d9dca8a39a3b4d58efe4ab484df7254f \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0.jar.sha1 b/server/licenses/lucene-misc-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..5cdf6810fa57c --- /dev/null +++ b/server/licenses/lucene-misc-7.4.0.jar.sha1 @@ -0,0 +1 @@ +bf844bb6f6d84da19e8c79ce5fbb4cf6d00f2611 \ No newline at 
end of file diff --git a/server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index cf26412b63f80..0000000000000 --- a/server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e9d0c0c020917d4bf9b590526866ff5547dbaa17 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0.jar.sha1 b/server/licenses/lucene-queries-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..198890379374f --- /dev/null +++ b/server/licenses/lucene-queries-7.4.0.jar.sha1 @@ -0,0 +1 @@ +229a50e6d9d4db076f671c230d493000c6e2972c \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 63533b774673f..0000000000000 --- a/server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -50969cdb7279047fbec94dda6e7d74d1c73c07f8 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..afdc275afe2b3 --- /dev/null +++ b/server/licenses/lucene-queryparser-7.4.0.jar.sha1 @@ -0,0 +1 @@ +8e58add0d0c39df97d07c8e343041989bf4b3a3f \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 4eab31d62bd41..0000000000000 --- a/server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -94524b293572b1f0d01a0faeeade1ff24713f966 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..81ae3bddd0709 --- /dev/null +++ b/server/licenses/lucene-sandbox-7.4.0.jar.sha1 @@ -0,0 +1 @@ +1692604fa06a945d1ee19939022ef1a912235db3 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index ae5a2ea0375fd..0000000000000 --- a/server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -878db723e41ece636ed338c4ef374e900f221a14 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0.jar.sha1 b/server/licenses/lucene-spatial-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..cc3f31340b9a2 --- /dev/null +++ b/server/licenses/lucene-spatial-7.4.0.jar.sha1 @@ -0,0 +1 @@ +847d2f897961124e2fc7d5e55d8309635bb026bc \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 9f5129d89056a..0000000000000 --- a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c8dc85c32aeac6ff320aa6a9ea57881ad4847a55 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..3f05790e430f5 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 @@ -0,0 +1 @@ +586892eefc0546643d7f5d7f83659c7db0d534ff \ No newline at end of file diff --git 
a/server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 02fcef681fc30..0000000000000 --- a/server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -203d8d22ab172e624784a5fdeaecdd01ae25fb3d \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..8c767b16c538b --- /dev/null +++ b/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 @@ -0,0 +1 @@ +32cd2854f39ff453a5d128ce40e11eea4168abbf \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index a7daa7ff02a38..0000000000000 --- a/server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4d6cf8fa1064a86991d5cd12a2ed32119ac91212 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0.jar.sha1 b/server/licenses/lucene-suggest-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..59d59cf79413a --- /dev/null +++ b/server/licenses/lucene-suggest-7.4.0.jar.sha1 @@ -0,0 +1 @@ +0cdc1a512032f8b23dd4b1add0f5cd06325addc3 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 134072bc13701..0000000000000 --- a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c1bbf611535f0b0fd0ba14e8da67c8d645b95244 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..80ba6c76aa301 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 @@ -0,0 +1 @@ +730d9ac80436c8cbc0b2a8a749259be536b97316 \ No newline at end of file From 59e7c6411a04f08a325f02d612e12eab12b22316 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 22 Jun 2018 07:36:03 -0700 Subject: [PATCH 24/34] Core: Combine messageRecieved methods in TransportRequestHandler (#31519) TransportRequestHandler currently contains 2 messageReceived methods, one which takes a Task, and one that does not. The first just delegates to the second. This commit changes all existing implementors of TransportRequestHandler to implement the version which takes Task, thus allowing the class to be a functional interface, and eliminating the need to throw exceptions when a task needs to be ensured. 
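
Editor's note, for illustration only: the practical effect of this change is that call sites can collapse an anonymous TransportRequestHandler (whose task-less overload could only throw) into a lambda. A minimal before/after sketch, not taken from this patch; ACTION_NAME, the MyRequest type, and the handle(...) helper are hypothetical placeholders, while registerRequestHandler and the (request, channel, task) signature follow the call sites changed below.

    // Before: the interface had two abstract methods, so handlers had to be
    // anonymous classes and stub out the overload that lacks a Task.
    transportService.registerRequestHandler(ACTION_NAME, ThreadPool.Names.SAME, MyRequest::new,
        new TransportRequestHandler<MyRequest>() {
            @Override
            public void messageReceived(MyRequest request, TransportChannel channel) {
                throw new UnsupportedOperationException("the task parameter is required");
            }

            @Override
            public void messageReceived(MyRequest request, TransportChannel channel, Task task) throws Exception {
                channel.sendResponse(handle(request, task)); // handle(...) is a hypothetical helper
            }
        });

    // After: with a single messageReceived(Request, TransportChannel, Task) method the
    // interface is functional, so the same registration becomes a lambda.
    transportService.registerRequestHandler(ACTION_NAME, ThreadPool.Names.SAME, MyRequest::new,
        (request, channel, task) -> channel.sendResponse(handle(request, task)));
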
--- .../netty4/Netty4ScheduledPingTests.java | 3 +- ...rossClusterSearchUnavailableClusterIT.java | 4 +- .../liveness/TransportLivenessAction.java | 3 +- .../cancel/TransportCancelTasksAction.java | 3 +- .../action/search/SearchTransportService.java | 169 +++++++----------- .../support/HandledTransportAction.java | 5 - .../broadcast/TransportBroadcastAction.java | 5 - .../node/TransportBroadcastByNodeAction.java | 2 +- .../support/nodes/TransportNodesAction.java | 6 - .../TransportReplicationAction.java | 16 -- ...ransportInstanceSingleOperationAction.java | 3 +- .../shard/TransportSingleShardAction.java | 5 +- .../support/tasks/TransportTasksAction.java | 2 +- .../index/NodeMappingRefreshAction.java | 3 +- .../action/shard/ShardStateAction.java | 5 +- .../discovery/zen/MasterFaultDetection.java | 3 +- .../discovery/zen/MembershipAction.java | 7 +- .../discovery/zen/NodesFaultDetection.java | 3 +- .../zen/PublishClusterStateAction.java | 5 +- .../discovery/zen/UnicastZenPing.java | 3 +- .../discovery/zen/ZenDiscovery.java | 3 +- .../gateway/LocalAllocateDangledIndices.java | 3 +- .../indices/flush/SyncedFlushService.java | 7 +- .../recovery/PeerRecoverySourceService.java | 3 +- .../recovery/PeerRecoveryTargetService.java | 22 ++- .../indices/store/IndicesStore.java | 3 +- .../VerifyNodeRepositoryAction.java | 3 +- .../transport/RequestHandlerRegistry.java | 2 +- .../TaskAwareTransportRequestHandler.java | 30 ---- .../transport/TransportActionProxy.java | 3 +- .../transport/TransportRequestHandler.java | 9 +- .../transport/TransportService.java | 2 +- .../action/IndicesRequestIT.java | 5 - .../TransportBroadcastByNodeActionTests.java | 2 +- .../TransportClientNodesServiceTests.java | 3 +- .../discovery/zen/ZenDiscoveryUnitTests.java | 4 +- .../RemoteClusterConnectionTests.java | 4 +- .../transport/TransportActionProxyTests.java | 14 +- .../AbstractSimpleTransportTestCase.java | 63 +++---- .../action/TransportRollupSearchAction.java | 5 - .../SecurityServerTransportInterceptor.java | 5 - 41 files changed, 177 insertions(+), 273 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java index b967a7ea41069..bd62ff0af0b5a 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; @@ -91,7 +92,7 @@ public void testScheduledPing() throws Exception { serviceA.registerRequestHandler("sayHello", TransportRequest.Empty::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(TransportRequest.Empty request, TransportChannel channel) { + public void messageReceived(TransportRequest.Empty request, TransportChannel channel, Task task) { try { channel.sendResponse(TransportResponse.Empty.INSTANCE, TransportResponseOptions.EMPTY); } 
catch (IOException e) { diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 73df782c92049..29aec900cefa9 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -103,12 +103,12 @@ private static MockTransportService startTransport( MockTransportService newService = MockTransportService.createNewService(s, version, threadPool, null); try { newService.registerRequestHandler(ClusterSearchShardsAction.NAME, ThreadPool.Names.SAME, ClusterSearchShardsRequest::new, - (request, channel) -> { + (request, channel, task) -> { channel.sendResponse(new ClusterSearchShardsResponse(new ClusterSearchShardsGroup[0], knownNodes.toArray(new DiscoveryNode[0]), Collections.emptyMap())); }); newService.registerRequestHandler(ClusterStateAction.NAME, ThreadPool.Names.SAME, ClusterStateRequest::new, - (request, channel) -> { + (request, channel, task) -> { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); for (DiscoveryNode node : knownNodes) { builder.add(node); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java index 09c608ac84280..ef8014cade4dc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; @@ -39,7 +40,7 @@ public TransportLivenessAction(ClusterService clusterService, TransportService t } @Override - public void messageReceived(LivenessRequest request, TransportChannel channel) throws Exception { + public void messageReceived(LivenessRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(new LivenessResponse(clusterService.getClusterName(), clusterService.localNode())); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index b99630dd4f960..918d56867627b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.threadpool.ThreadPool; @@ -285,7 +286,7 @@ public void writeTo(StreamOutput out) throws IOException { class BanParentRequestHandler implements 
TransportRequestHandler { @Override - public void messageReceived(final BanParentTaskRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final BanParentTaskRequest request, final TransportChannel channel, Task task) throws Exception { if (request.ban) { logger.debug("Received ban for the parent [{}] on the node [{}], reason: [{}]", request.parentTaskId, clusterService.localNode().getId(), request.reason); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 8a4c8b0882f08..dd43b82f8b862 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -45,13 +45,10 @@ import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.ScrollQuerySearchResult; -import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterService; -import org.elasticsearch.transport.TaskAwareTransportRequestHandler; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportActionProxy; -import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; @@ -314,150 +311,116 @@ public void writeTo(StreamOutput out) throws IOException { public static void registerRequestHandler(TransportService transportService, SearchService searchService) { transportService.registerRequestHandler(FREE_CONTEXT_SCROLL_ACTION_NAME, ThreadPool.Names.SAME, ScrollFreeContextRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ScrollFreeContextRequest request, TransportChannel channel, Task task) throws Exception { - boolean freed = searchService.freeContext(request.id()); - channel.sendResponse(new SearchFreeContextResponse(freed)); - } - }); + (request, channel, task) -> { + boolean freed = searchService.freeContext(request.id()); + channel.sendResponse(new SearchFreeContextResponse(freed)); + }); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, (Supplier) SearchFreeContextResponse::new); transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, ThreadPool.Names.SAME, SearchFreeContextRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(SearchFreeContextRequest request, TransportChannel channel, Task task) throws Exception { - boolean freed = searchService.freeContext(request.id()); - channel.sendResponse(new SearchFreeContextResponse(freed)); - } - }); + (request, channel, task) -> { + boolean freed = searchService.freeContext(request.id()); + channel.sendResponse(new SearchFreeContextResponse(freed)); + }); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_ACTION_NAME, (Supplier) SearchFreeContextResponse::new); transportService.registerRequestHandler(CLEAR_SCROLL_CONTEXTS_ACTION_NAME, () -> TransportRequest.Empty.INSTANCE, - ThreadPool.Names.SAME, new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(TransportRequest.Empty request, TransportChannel channel, Task task) throws Exception { - 
searchService.freeAllScrollContexts(); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } - }); + ThreadPool.Names.SAME, (request, channel, task) -> { + searchService.freeAllScrollContexts(); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + }); TransportActionProxy.registerProxyAction(transportService, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, () -> TransportResponse.Empty.INSTANCE); transportService.registerRequestHandler(DFS_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { - searchService.executeDfsPhase(request, (SearchTask) task, new ActionListener() { - @Override - public void onResponse(SearchPhaseResult searchPhaseResult) { - try { - channel.sendResponse(searchPhaseResult); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + (request, channel, task) -> { + searchService.executeDfsPhase(request, (SearchTask) task, new ActionListener() { + @Override + public void onResponse(SearchPhaseResult searchPhaseResult) { + try { + channel.sendResponse(searchPhaseResult); + } catch (IOException e) { + throw new UncheckedIOException(e); } - - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (IOException e1) { - throw new UncheckedIOException(e1); - } + } + + @Override + public void onFailure(Exception e) { + try { + channel.sendResponse(e); + } catch (IOException e1) { + throw new UncheckedIOException(e1); } - }); - - } + } + }); }); TransportActionProxy.registerProxyAction(transportService, DFS_ACTION_NAME, DfsSearchResult::new); transportService.registerRequestHandler(QUERY_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { - searchService.executeQueryPhase(request, (SearchTask) task, new ActionListener() { - @Override - public void onResponse(SearchPhaseResult searchPhaseResult) { - try { - channel.sendResponse(searchPhaseResult); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + (request, channel, task) -> { + searchService.executeQueryPhase(request, (SearchTask) task, new ActionListener() { + @Override + public void onResponse(SearchPhaseResult searchPhaseResult) { + try { + channel.sendResponse(searchPhaseResult); + } catch (IOException e) { + throw new UncheckedIOException(e); } - - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (IOException e1) { - throw new UncheckedIOException(e1); - } + } + + @Override + public void onFailure(Exception e) { + try { + channel.sendResponse(e); + } catch (IOException e1) { + throw new UncheckedIOException(e1); } - }); - } + } + }); }); TransportActionProxy.registerProxyAction(transportService, QUERY_ACTION_NAME, (request) -> ((ShardSearchRequest)request).numberOfShards() == 1 ? 
QueryFetchSearchResult::new : QuerySearchResult::new); transportService.registerRequestHandler(QUERY_ID_ACTION_NAME, ThreadPool.Names.SEARCH, QuerySearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(QuerySearchRequest request, TransportChannel channel, Task task) throws Exception { - QuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + QuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, QUERY_ID_ACTION_NAME, QuerySearchResult::new); transportService.registerRequestHandler(QUERY_SCROLL_ACTION_NAME, ThreadPool.Names.SEARCH, InternalScrollSearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel, Task task) throws Exception { - ScrollQuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + ScrollQuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, QUERY_SCROLL_ACTION_NAME, ScrollQuerySearchResult::new); transportService.registerRequestHandler(QUERY_FETCH_SCROLL_ACTION_NAME, ThreadPool.Names.SEARCH, InternalScrollSearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel, Task task) throws Exception { - ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, QUERY_FETCH_SCROLL_ACTION_NAME, ScrollQueryFetchSearchResult::new); transportService.registerRequestHandler(FETCH_ID_SCROLL_ACTION_NAME, ThreadPool.Names.SEARCH, ShardFetchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardFetchRequest request, TransportChannel channel, Task task) throws Exception { - FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_SCROLL_ACTION_NAME, FetchSearchResult::new); transportService.registerRequestHandler(FETCH_ID_ACTION_NAME, ThreadPool.Names.SEARCH, ShardFetchSearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardFetchSearchRequest request, TransportChannel channel, Task task) throws Exception { - FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_ACTION_NAME, FetchSearchResult::new); // this is cheap, 
it does not fetch during the rewrite phase, so we can let it quickly execute on a networking thread transportService.registerRequestHandler(QUERY_CAN_MATCH_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { - boolean canMatch = searchService.canMatch(request); - channel.sendResponse(new CanMatchResponse(canMatch)); - } + (request, channel, task) -> { + boolean canMatch = searchService.canMatch(request); + channel.sendResponse(new CanMatchResponse(canMatch)); }); TransportActionProxy.registerProxyAction(transportService, QUERY_CAN_MATCH_NAME, (Supplier) CanMatchResponse::new); diff --git a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index 7cdcd017b9946..c55e0cff6f250 100644 --- a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -64,11 +64,6 @@ protected HandledTransportAction(Settings settings, String actionName, boolean c class TransportHandler implements TransportRequestHandler { - @Override - public final void messageReceived(Request request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override public final void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { // We already got the task created on the network layer - no need to create it again on the transport layer diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index 8a28c2c9d891d..1bec46fd1213e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -284,10 +284,5 @@ class ShardTransportHandler implements TransportRequestHandler { public void messageReceived(ShardRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(shardOperation(request, task)); } - - @Override - public final void messageReceived(final ShardRequest request, final TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required"); - } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index dac1a55b6361f..348162b8c33bd 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -393,7 +393,7 @@ protected void onCompletion() { class BroadcastByNodeTransportRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final NodeRequest request, TransportChannel channel) throws Exception { + public void messageReceived(final NodeRequest request, TransportChannel channel, Task task) throws Exception { List shards = 
request.getShards(); final int totalShards = shards.size(); if (logger.isTraceEnabled()) { diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 7a074c91c7152..6a9ac53f7bebd 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -258,12 +258,6 @@ class NodeTransportHandler implements TransportRequestHandler { public void messageReceived(NodeRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(nodeOperation(request, task)); } - - @Override - public void messageReceived(NodeRequest request, TransportChannel channel) throws Exception { - channel.sendResponse(nodeOperation(request)); - } - } } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index d7c908bf9fa5b..c31ee81a802a5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -273,11 +273,6 @@ public void onFailure(Exception e) { } }); } - - @Override - public void messageReceived(Request request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } } protected class PrimaryOperationTransportHandler implements TransportRequestHandler> { @@ -286,11 +281,6 @@ public PrimaryOperationTransportHandler() { } - @Override - public void messageReceived(final ConcreteShardRequest request, final TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override public void messageReceived(ConcreteShardRequest request, TransportChannel channel, Task task) { new AsyncPrimaryAction(request.request, request.targetAllocationID, request.primaryTerm, channel, (ReplicationTask) task).run(); @@ -493,12 +483,6 @@ public void respond(ActionListener listener) { public class ReplicaOperationTransportHandler implements TransportRequestHandler> { - @Override - public void messageReceived( - final ConcreteReplicaRequest replicaRequest, final TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override public void messageReceived( final ConcreteReplicaRequest replicaRequest, diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index 280a35207a9db..2d8ccb6e524f4 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportChannel; @@ -243,7 +244,7 @@ public void onTimeout(TimeValue timeout) { private class ShardTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(final Request request, final TransportChannel channel) throws Exception { + public void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { shardOperation(request, new ActionListener() { @Override public void onResponse(Response response) { diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index d7e5633559d8a..7116061640f3e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -40,6 +40,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -271,7 +272,7 @@ public void handleException(TransportException exp) { private class TransportHandler implements TransportRequestHandler { @Override - public void messageReceived(Request request, final TransportChannel channel) throws Exception { + public void messageReceived(Request request, final TransportChannel channel, Task task) throws Exception { // if we have a local operation, execute it on a thread since we don't spawn execute(request, new ActionListener() { @Override @@ -298,7 +299,7 @@ public void onFailure(Exception e) { private class ShardTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(final Request request, final TransportChannel channel) throws Exception { + public void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { if (logger.isTraceEnabled()) { logger.trace("executing [{}] on shard [{}]", request, request.internalShardId); } diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index 5599dd5f98b06..ee116d9f957c6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -338,7 +338,7 @@ private void finishHim() { class NodeTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(final NodeTaskRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final NodeTaskRequest request, final TransportChannel channel, Task task) throws Exception { nodeOperation(request, new ActionListener() { @Override public void onResponse( diff --git a/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java b/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java index fc7a4206486a3..2559c14848d76 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java 
+++ b/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -65,7 +66,7 @@ public void nodeMappingRefresh(final DiscoveryNode masterNode, final NodeMapping private class NodeMappingRefreshTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(NodeMappingRefreshRequest request, TransportChannel channel) throws Exception { + public void messageReceived(NodeMappingRefreshRequest request, TransportChannel channel, Task task) throws Exception { metaDataMappingService.refreshMapping(request.index(), request.indexUUID()); channel.sendResponse(TransportResponse.Empty.INSTANCE); } diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index f690efa4c9a0c..0949e47cd0527 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -52,6 +52,7 @@ import org.elasticsearch.discovery.Discovery; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.EmptyTransportResponseHandler; @@ -237,7 +238,7 @@ private static class ShardFailedTransportHandler implements TransportRequestHand } @Override - public void messageReceived(FailedShardEntry request, TransportChannel channel) throws Exception { + public void messageReceived(FailedShardEntry request, TransportChannel channel, Task task) throws Exception { logger.debug(() -> new ParameterizedMessage("{} received shard failed for {}", request.shardId, request), request.failure); clusterService.submitStateUpdateTask( "shard-failed", @@ -487,7 +488,7 @@ private static class ShardStartedTransportHandler implements TransportRequestHan } @Override - public void messageReceived(StartedShardEntry request, TransportChannel channel) throws Exception { + public void messageReceived(StartedShardEntry request, TransportChannel channel, Task task) throws Exception { logger.debug("{} received shard started for [{}]", request.shardId, request); clusterService.submitStateUpdateTask( "shard-started " + request, diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java b/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java index c38cfe88619ee..5acf2effad390 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import 
org.elasticsearch.transport.TransportChannel; @@ -321,7 +322,7 @@ public Throwable fillInStackTrace() { private class MasterPingRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final MasterPingRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final MasterPingRequest request, final TransportChannel channel, Task task) throws Exception { final DiscoveryNodes nodes = clusterStateSupplier.get().nodes(); // check if we are really the same master as the one we seemed to be think we are // this can happen if the master got "kill -9" and then another node started using the same port diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java b/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java index fdfcd8ac29079..e8bafea66d3a4 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -133,7 +134,7 @@ public void writeTo(StreamOutput out) throws IOException { private class JoinRequestRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final JoinRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final JoinRequest request, final TransportChannel channel, Task task) throws Exception { listener.onJoin(request.node, new JoinCallback() { @Override public void onSuccess() { @@ -190,7 +191,7 @@ static class ValidateJoinRequestRequestHandler implements TransportRequestHandle } @Override - public void messageReceived(ValidateJoinRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ValidateJoinRequest request, TransportChannel channel, Task task) throws Exception { DiscoveryNode node = localNodeSupplier.get(); assert node != null : "local node is null"; joinValidators.stream().forEach(action -> action.accept(node, request.state)); @@ -281,7 +282,7 @@ public void writeTo(StreamOutput out) throws IOException { private class LeaveRequestRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(LeaveRequest request, TransportChannel channel) throws Exception { + public void messageReceived(LeaveRequest request, TransportChannel channel, Task task) throws Exception { listener.onLeave(request.node); channel.sendResponse(TransportResponse.Empty.INSTANCE); } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java b/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java index d19cc98441b79..57e5cab020be1 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportChannel; @@ -276,7 +277,7 @@ public String executor() { class PingRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(PingRequest request, TransportChannel channel) throws Exception { + public void messageReceived(PingRequest request, TransportChannel channel, Task task) throws Exception { // if we are not the node we are supposed to be pinged, send an exception // this can happen when a kill -9 is sent, and another node is started using the same port if (!localNode.equals(request.targetNode())) { diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java b/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java index 5398b2a057ae4..5e9f960e893cf 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java @@ -45,6 +45,7 @@ import org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandler; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.EmptyTransportResponseHandler; @@ -447,14 +448,14 @@ public void onFailure(Exception e) { private class SendClusterStateRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(BytesTransportRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(BytesTransportRequest request, final TransportChannel channel, Task task) throws Exception { handleIncomingClusterStateRequest(request, channel); } } private class CommitClusterStateRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(CommitClusterStateRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(CommitClusterStateRequest request, final TransportChannel channel, Task task) throws Exception { handleCommitRequest(request, channel); } } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index 9c86fa17e9b06..74414dc446e6d 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -45,6 +45,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.KeyedLock; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; @@ -563,7 +564,7 @@ private UnicastPingResponse handlePingRequest(final UnicastPingRequest request) class UnicastPingRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(UnicastPingRequest request, TransportChannel channel) throws Exception { + public void messageReceived(UnicastPingRequest request, TransportChannel channel, Task task) throws Exception { if (closed) { throw new AlreadyClosedException("node is shutting down"); } diff --git 
a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 55ecf7ca25fa6..eb9a9f8d4885d 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -56,6 +56,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.DiscoveryStats; import org.elasticsearch.discovery.zen.PublishClusterStateAction.IncomingClusterStateListener; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -1187,7 +1188,7 @@ public void writeTo(StreamOutput out) throws IOException { class RejoinClusterRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel, Task task) throws Exception { try { channel.sendResponse(TransportResponse.Empty.INSTANCE); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java b/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java index c8986b0493459..7bc2e38dde024 100644 --- a/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java +++ b/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.MasterNotDiscoveredException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -112,7 +113,7 @@ public interface Listener { class AllocateDangledRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel, Task task) throws Exception { String[] indexNames = new String[request.indices.length]; for (int i = 0; i < request.indices.length; i++) { indexNames[i] = request.indices[i].getIndex().getName(); diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index 6ef6c1546d152..f01b4bb312174 100644 --- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -54,6 +54,7 @@ import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -778,7 +779,7 @@ public String toString() { private final class PreSyncedFlushTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(PreShardSyncedFlushRequest request, 
TransportChannel channel) throws Exception { + public void messageReceived(PreShardSyncedFlushRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(performPreSyncedFlush(request)); } } @@ -786,7 +787,7 @@ public void messageReceived(PreShardSyncedFlushRequest request, TransportChannel private final class SyncedFlushTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(ShardSyncedFlushRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ShardSyncedFlushRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(performSyncedFlush(request)); } } @@ -794,7 +795,7 @@ public void messageReceived(ShardSyncedFlushRequest request, TransportChannel ch private final class InFlightOpCountTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(InFlightOpsRequest request, TransportChannel channel) throws Exception { + public void messageReceived(InFlightOpsRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(performInFlightOps(request)); } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java index 51eabdd4e8c73..06e8a5734f69b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; @@ -103,7 +104,7 @@ private RecoveryResponse recover(final StartRecoveryRequest request) throws IOEx class StartRecoveryTransportRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final StartRecoveryRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final StartRecoveryRequest request, final TransportChannel channel, Task task) throws Exception { RecoveryResponse response = recover(request); channel.sendResponse(response); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index cb49eed25f8fe..aaa4697e5cbb5 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -55,6 +55,7 @@ import org.elasticsearch.index.translog.TranslogCorruptedException; import org.elasticsearch.indices.recovery.RecoveriesCollection.RecoveryRef; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.FutureTransportResponseHandler; @@ -397,7 +398,8 @@ public interface RecoveryListener { class PrepareForTranslogOperationsRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, 
TransportChannel channel) throws Exception { + public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, TransportChannel channel, + Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().prepareForTranslogOperations(request.isFileBasedRecovery(), request.totalTranslogOps()); @@ -409,7 +411,7 @@ public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, class FinalizeRecoveryRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId())) { recoveryRef.target().finalizeRecovery(request.globalCheckpoint()); @@ -421,7 +423,7 @@ public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportCh class WaitForClusterStateRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryWaitForClusterStateRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryWaitForClusterStateRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().ensureClusterStateVersion(request.clusterStateVersion()); @@ -433,7 +435,8 @@ public void messageReceived(RecoveryWaitForClusterStateRequest request, Transpor class HandoffPrimaryContextRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final RecoveryHandoffPrimaryContextRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final RecoveryHandoffPrimaryContextRequest request, final TransportChannel channel, + Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId())) { recoveryRef.target().handoffPrimaryContext(request.primaryContext()); } @@ -445,7 +448,8 @@ public void messageReceived(final RecoveryHandoffPrimaryContextRequest request, class TranslogOperationsRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final RecoveryTranslogOperationsRequest request, final TransportChannel channel) throws IOException { + public void messageReceived(final RecoveryTranslogOperationsRequest request, final TransportChannel channel, + Task task) throws IOException { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId())) { final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext()); @@ -463,7 +467,7 @@ public void messageReceived(final RecoveryTranslogOperationsRequest request, fin @Override public void onNewClusterState(ClusterState state) { try { - messageReceived(request, channel); + messageReceived(request, channel, task); } catch (Exception e) { onFailure(e); } @@ -537,7 +541,7 @@ public void onTimeout(TimeValue timeout) { class FilesInfoRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel channel) throws Exception { + public void 
messageReceived(RecoveryFilesInfoRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().receiveFileInfo(request.phase1FileNames, request.phase1FileSizes, request.phase1ExistingFileNames, @@ -550,7 +554,7 @@ public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel c class CleanFilesRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryCleanFilesRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryCleanFilesRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().cleanFiles(request.totalTranslogOps(), request.sourceMetaSnapshot()); @@ -565,7 +569,7 @@ class FileChunkTransportRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final ShardActiveRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final ShardActiveRequest request, final TransportChannel channel, Task task) throws Exception { IndexShard indexShard = getShard(request); // make sure shard is really there before register cluster state observer diff --git a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java index ba3f9c048d08a..380ae97408016 100644 --- a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoriesService.VerifyResponse; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -146,7 +147,7 @@ public void writeTo(StreamOutput out) throws IOException { class VerifyNodeRepositoryRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(VerifyNodeRepositoryRequest request, TransportChannel channel) throws Exception { + public void messageReceived(VerifyNodeRepositoryRequest request, TransportChannel channel, Task task) throws Exception { DiscoveryNode localNode = clusterService.state().nodes().getLocalNode(); try { doVerify(request.repository, request.verificationToken, localNode); diff --git a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java index 91b54ab8f2097..4e09daf9ccf0a 100644 --- a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java +++ b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java @@ -59,7 +59,7 @@ public Request newRequest(StreamInput in) throws IOException { public void processMessageReceived(Request request, TransportChannel channel) throws Exception { final Task task = taskManager.register(channel.getChannelType(), action, request); if (task == null) { - handler.messageReceived(request, channel); + handler.messageReceived(request, channel, null); } else { boolean success = 
false; try { diff --git a/server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java b/server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java deleted file mode 100644 index 12899d86d430d..0000000000000 --- a/server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.transport; - -/** - * Transport request handlers that is using task context - */ -public abstract class TaskAwareTransportRequestHandler implements TransportRequestHandler { - @Override - public final void messageReceived(T request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required"); - } -} diff --git a/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java b/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java index 8c48f08874350..a17509e826003 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -52,7 +53,7 @@ private static class ProxyRequestHandler implements Tran } @Override - public void messageReceived(T request, TransportChannel channel) throws Exception { + public void messageReceived(T request, TransportChannel channel, Task task) throws Exception { DiscoveryNode targetNode = request.targetNode; TransportRequest wrappedRequest = request.wrapped; service.sendRequest(targetNode, action, wrappedRequest, diff --git a/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java b/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java index 8c90b82fe7c45..be95798806847 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java @@ -23,12 +23,5 @@ public interface TransportRequestHandler { - /** - * Override this method if access to the Task parameter is needed - */ - default void messageReceived(final T request, final TransportChannel channel, Task task) throws Exception { - messageReceived(request, channel); - } - - void messageReceived(T request, TransportChannel channel) throws Exception; + void messageReceived(T request, TransportChannel channel, Task task) throws Exception; } diff --git 
a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 656d8c3841769..8d3929cd6615a 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -231,7 +231,7 @@ protected void doStart() { () -> HandshakeRequest.INSTANCE, ThreadPool.Names.SAME, false, false, - (request, channel) -> channel.sendResponse( + (request, channel, task) -> channel.sendResponse( new HandshakeResponse(localNode, clusterName, localNode.getVersion()))); if (connectToRemoteCluster) { // here we start to connect to the remote clusters diff --git a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java index 8fac0b91cd6d6..40795bff730e0 100644 --- a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java @@ -779,11 +779,6 @@ public void messageReceived(T request, TransportChannel channel, Task task) thro } requestHandler.messageReceived(request, channel, task); } - - @Override - public void messageReceived(T request, TransportChannel channel) throws Exception { - messageReceived(request, channel, null); - } } } } diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 61beb59bc0c24..fdc3d890363ad 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -364,7 +364,7 @@ public void testOperationExecution() throws Exception { TestTransportChannel channel = new TestTransportChannel(); - handler.messageReceived(action.new NodeRequest(nodeId, new Request(), new ArrayList<>(shards)), channel); + handler.messageReceived(action.new NodeRequest(nodeId, new Request(), new ArrayList<>(shards)), channel, null); // check the operation was executed only on the expected shards assertEquals(shards, action.getResults().keySet()); diff --git a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index c8030e1cf4aee..2beaed1e106e4 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.node.Node; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; @@ -469,7 +470,7 @@ class MockHandler implements TransportRequestHandler { } @Override - public void messageReceived(ClusterStateRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ClusterStateRequest request, TransportChannel channel, Task task) throws Exception { if (block.get()) { release.await(); return; diff --git 
a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index a60a23bcd6d5c..6dbf80d9be675 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -368,7 +368,7 @@ public void testValidateOnUnsupportedIndexVersionCreated() throws Exception { .routingTable(RoutingTable.builder().add(indexRoutingTable).build()); if (incompatible) { IllegalStateException ex = expectThrows(IllegalStateException.class, () -> - request.messageReceived(new MembershipAction.ValidateJoinRequest(stateBuilder.build()), null)); + request.messageReceived(new MembershipAction.ValidateJoinRequest(stateBuilder.build()), null, null)); assertEquals("index [test] version not supported: " + VersionUtils.getPreviousVersion(Version.CURRENT.minimumIndexCompatibilityVersion()) + " minimum compatible index version is: " + Version.CURRENT.minimumIndexCompatibilityVersion(), ex.getMessage()); @@ -400,7 +400,7 @@ public void sendResponse(TransportResponse response, TransportResponseOptions op public void sendResponse(Exception exception) throws IOException { } - }); + }, null); assertTrue(sendResponse.get()); } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 637b8fb26a880..0369eda2a8899 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -114,7 +114,7 @@ public static MockTransportService startTransport( MockTransportService newService = MockTransportService.createNewService(s, version, threadPool, null); try { newService.registerRequestHandler(ClusterSearchShardsAction.NAME,ThreadPool.Names.SAME, ClusterSearchShardsRequest::new, - (request, channel) -> { + (request, channel, task) -> { if ("index_not_found".equals(request.preference())) { channel.sendResponse(new IndexNotFoundException("index")); } else { @@ -123,7 +123,7 @@ public static MockTransportService startTransport( } }); newService.registerRequestHandler(ClusterStateAction.NAME, ThreadPool.Names.SAME, ClusterStateRequest::new, - (request, channel) -> { + (request, channel, task) -> { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); for (DiscoveryNode node : knownNodes) { builder.add(node); diff --git a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java index 3f4ae7bdd2d76..491ba123a451d 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.transport; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; @@ -26,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import 
org.elasticsearch.threadpool.TestThreadPool; @@ -88,7 +88,7 @@ private MockTransportService buildService(final Version version) { public void testSendMessage() throws InterruptedException { serviceA.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_A"; @@ -98,7 +98,7 @@ public void testSendMessage() throws InterruptedException { serviceA.connectToNode(nodeB); serviceB.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_B"; @@ -107,7 +107,7 @@ public void testSendMessage() throws InterruptedException { TransportActionProxy.registerProxyAction(serviceB, "/test", SimpleTestResponse::new); serviceB.connectToNode(nodeC); serviceC.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_C"; @@ -151,7 +151,7 @@ public String executor() { public void testException() throws InterruptedException { serviceA.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_A"; @@ -161,7 +161,7 @@ public void testException() throws InterruptedException { serviceA.connectToNode(nodeB); serviceB.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_B"; @@ -170,7 +170,7 @@ public void testException() throws InterruptedException { TransportActionProxy.registerProxyAction(serviceB, "/test", SimpleTestResponse::new); serviceB.connectToNode(nodeC); serviceC.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { throw new ElasticsearchException("greetings from TS_C"); }); TransportActionProxy.registerProxyAction(serviceC, "/test", SimpleTestResponse::new); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 587c192beb2d6..0b676e1403481 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -47,6 +47,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.mocksocket.MockServerSocket; import org.elasticsearch.node.Node; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.transport.MockTransportService; @@ -205,7 +206,7 @@ public void assertNoPendingHandshakes(Transport transport) { public void testHelloWorld() { serviceA.registerRequestHandler("sayHello", 
StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { assertThat("moshe", equalTo(request.message)); try { channel.sendResponse(new StringMessageResponse("hello " + request.message)); @@ -280,7 +281,7 @@ public void handleException(TransportException exp) { public void testThreadContext() throws ExecutionException, InterruptedException { - serviceA.registerRequestHandler("ping_pong", StringMessageRequest::new, ThreadPool.Names.GENERIC, (request, channel) -> { + serviceA.registerRequestHandler("ping_pong", StringMessageRequest::new, ThreadPool.Names.GENERIC, (request, channel, task) -> { assertEquals("ping_user", threadPool.getThreadContext().getHeader("test.ping.user")); assertNull(threadPool.getThreadContext().getTransient("my_private_context")); try { @@ -339,7 +340,7 @@ public void testLocalNodeConnection() throws InterruptedException { serviceA.disconnectFromNode(nodeA); final AtomicReference exception = new AtomicReference<>(); serviceA.registerRequestHandler("localNode", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { try { channel.sendResponse(new StringMessageResponse(request.message)); } catch (IOException e) { @@ -377,7 +378,7 @@ public String executor() { } public void testAdapterSendReceiveCallbacks() throws Exception { - final TransportRequestHandler requestHandler = (request, channel) -> { + final TransportRequestHandler requestHandler = (request, channel, task) -> { try { if (randomBoolean()) { channel.sendResponse(TransportResponse.Empty.INSTANCE); @@ -485,7 +486,7 @@ public void requestSent(DiscoveryNode node, long requestId, String action, Trans public void testVoidMessageCompressed() { serviceA.registerRequestHandler("sayHello", TransportRequest.Empty::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { try { TransportResponseOptions responseOptions = TransportResponseOptions.builder().withCompress(true).build(); channel.sendResponse(TransportResponse.Empty.INSTANCE, responseOptions); @@ -531,7 +532,7 @@ public void testHelloWorldCompressed() { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) { assertThat("moshe", equalTo(request.message)); try { TransportResponseOptions responseOptions = TransportResponseOptions.builder().withCompress(true).build(); @@ -580,7 +581,7 @@ public void testErrorMessage() { serviceA.registerRequestHandler("sayHelloException", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) throws Exception { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws Exception { assertThat("moshe", equalTo(request.message)); throw new RuntimeException("bad message !!!"); } @@ -639,7 +640,7 @@ public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierExcepti Set sendingErrors = ConcurrentCollections.newConcurrentSet(); Set responseErrors = ConcurrentCollections.newConcurrentSet(); serviceA.registerRequestHandler("test", TestRequest::new, - randomBoolean() ? 
ThreadPool.Names.SAME : ThreadPool.Names.GENERIC, (request, channel) -> { + randomBoolean() ? ThreadPool.Names.SAME : ThreadPool.Names.GENERIC, (request, channel, task) -> { try { channel.sendResponse(new TestResponse()); } catch (Exception e) { @@ -647,7 +648,7 @@ public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierExcepti responseErrors.add(e); } }); - final TransportRequestHandler ignoringRequestHandler = (request, channel) -> { + final TransportRequestHandler ignoringRequestHandler = (request, channel, task) -> { try { channel.sendResponse(new TestResponse()); } catch (Exception e) { @@ -763,7 +764,7 @@ public void testNotifyOnShutdown() throws Exception { final CountDownLatch latch2 = new CountDownLatch(1); try { serviceA.registerRequestHandler("foobar", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { try { latch2.await(); logger.info("Stop ServiceB now"); @@ -791,7 +792,7 @@ public void testTimeoutSendExceptionWithNeverSendingBackResponse() throws Except serviceA.registerRequestHandler("sayHelloTimeoutNoResponse", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) { assertThat("moshe", equalTo(request.message)); // don't send back a response } @@ -836,7 +837,7 @@ public void testTimeoutSendExceptionWithDelayedResponse() throws Exception { serviceA.registerRequestHandler("sayHelloTimeoutDelayedResponse", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) throws InterruptedException { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws InterruptedException { String message = request.message; inFlight.acquireUninterruptibly(); try { @@ -938,10 +939,10 @@ public void handleException(TransportException exp) { } public void testTracerLog() throws InterruptedException { - TransportRequestHandler handler = (request, channel) -> channel.sendResponse(new StringMessageResponse("")); + TransportRequestHandler handler = (request, channel, task) -> channel.sendResponse(new StringMessageResponse("")); TransportRequestHandler handlerWithError = new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) throws Exception { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws Exception { if (request.timeout() > 0) { Thread.sleep(request.timeout); } @@ -1257,7 +1258,7 @@ public void testVersionFrom0to1() throws Exception { serviceB.registerRequestHandler("/version", Version1Request::new, ThreadPool.Names.SAME, new TransportRequestHandler() { @Override - public void messageReceived(Version1Request request, TransportChannel channel) throws Exception { + public void messageReceived(Version1Request request, TransportChannel channel, Task task) throws Exception { assertThat(request.value1, equalTo(1)); assertThat(request.value2, equalTo(0)); // not set, coming from service A Version1Response response = new Version1Response(); @@ -1301,7 +1302,7 @@ public void testVersionFrom1to0() throws Exception { serviceA.registerRequestHandler("/version", Version0Request::new, ThreadPool.Names.SAME, new 
TransportRequestHandler() { @Override - public void messageReceived(Version0Request request, TransportChannel channel) throws Exception { + public void messageReceived(Version0Request request, TransportChannel channel, Task task) throws Exception { assertThat(request.value1, equalTo(1)); Version0Response response = new Version0Response(); response.value1 = 1; @@ -1344,7 +1345,7 @@ public String executor() { public void testVersionFrom1to1() throws Exception { serviceB.registerRequestHandler("/version", Version1Request::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertThat(request.value1, equalTo(1)); assertThat(request.value2, equalTo(2)); Version1Response response = new Version1Response(); @@ -1388,7 +1389,7 @@ public String executor() { public void testVersionFrom0to0() throws Exception { serviceA.registerRequestHandler("/version", Version0Request::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertThat(request.value1, equalTo(1)); Version0Response response = new Version0Response(); response.value1 = 1; @@ -1427,7 +1428,7 @@ public String executor() { public void testMockFailToSendNoConnectRule() throws Exception { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { assertThat("moshe", equalTo(request.message)); throw new RuntimeException("bad message !!!"); }); @@ -1484,7 +1485,7 @@ public void handleException(TransportException exp) { public void testMockUnresponsiveRule() throws IOException { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { assertThat("moshe", equalTo(request.message)); throw new RuntimeException("bad message !!!"); }); @@ -1540,7 +1541,7 @@ public void testHostOnMessages() throws InterruptedException { final AtomicReference addressB = new AtomicReference<>(); serviceB.registerRequestHandler("action1", TestRequest::new, ThreadPool.Names.SAME, new TransportRequestHandler() { @Override - public void messageReceived(TestRequest request, TransportChannel channel) throws Exception { + public void messageReceived(TestRequest request, TransportChannel channel, Task task) throws Exception { addressA.set(request.remoteAddress()); channel.sendResponse(new TestResponse()); latch.countDown(); @@ -1582,7 +1583,7 @@ public void testBlockingIncomingRequests() throws Exception { Settings.EMPTY, false, false)) { AtomicBoolean requestProcessed = new AtomicBoolean(false); service.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { requestProcessed.set(true); channel.sendResponse(TransportResponse.Empty.INSTANCE); }); @@ -1744,7 +1745,7 @@ class TestRequestHandler implements TransportRequestHandler { } @Override - public void messageReceived(TestRequest request, TransportChannel channel) throws Exception { + public void messageReceived(TestRequest request, TransportChannel channel, Task task) throws Exception { if (randomBoolean()) { Thread.sleep(randomIntBetween(10, 50)); } @@ -1868,18 +1869,18 @@ public String executor() { public void testRegisterHandlerTwice() { serviceB.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), - (request, message) -> { + (request, message, task) -> { throw new AssertionError("boom"); }); 
expectThrows(IllegalArgumentException.class, () -> serviceB.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), - (request, message) -> { + (request, message, task) -> { throw new AssertionError("boom"); }) ); serviceA.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), - (request, message) -> { + (request, message, task) -> { throw new AssertionError("boom"); }); } @@ -2066,7 +2067,7 @@ public void testResponseHeadersArePreserved() throws InterruptedException { List executors = new ArrayList<>(ThreadPool.THREAD_POOL_TYPES.keySet()); CollectionUtil.timSort(executors); // makes sure it's reproducible serviceA.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { threadPool.getThreadContext().putTransient("boom", new Object()); threadPool.getThreadContext().addResponseHeader("foo.bar", "baz"); @@ -2127,7 +2128,7 @@ public void testHandlerIsInvokedOnConnectionClose() throws IOException, Interrup CollectionUtil.timSort(executors); // makes sure it's reproducible TransportService serviceC = build(Settings.builder().put("name", "TS_TEST").build(), version0, null, true); serviceC.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // do nothing }); serviceC.start(); @@ -2187,7 +2188,7 @@ public void testConcurrentDisconnectOnNonPublishedConnection() throws IOExceptio CountDownLatch receivedLatch = new CountDownLatch(1); CountDownLatch sendResponseLatch = new CountDownLatch(1); serviceC.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // don't block on a network thread here threadPool.generic().execute(new AbstractRunnable() { @Override @@ -2255,7 +2256,7 @@ public void testTransportStats() throws Exception { CountDownLatch receivedLatch = new CountDownLatch(1); CountDownLatch sendResponseLatch = new CountDownLatch(1); serviceB.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // don't block on a network thread here threadPool.generic().execute(new AbstractRunnable() { @Override @@ -2368,7 +2369,7 @@ public void testTransportStatsWithException() throws Exception { Exception ex = new RuntimeException("boom"); ex.setStackTrace(new StackTraceElement[0]); serviceB.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // don't block on a network thread here threadPool.generic().execute(new AbstractRunnable() { @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index a9f3dc5a1b786..239be32033f13 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -399,11 +399,6 @@ static RollupSearchContext separateIndices(String[] indices, ImmutableOpenMap { - @Override - public final void messageReceived(SearchRequest request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required 
for this operation"); - } - @Override public final void messageReceived(final SearchRequest request, final TransportChannel channel, Task task) throws Exception { // We already got the task created on the network layer - no need to create it again on the transport layer diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java index 7de3e5d0980d6..55287d5d50387 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java @@ -318,10 +318,5 @@ public void messageReceived(T request, TransportChannel channel, Task task) thro } } } - - @Override - public void messageReceived(T request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("task parameter is required for this operation"); - } } } From 16e4e7a7cfb5196e02e6fc988f34553dc9d34acc Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 22 Jun 2018 17:15:29 +0200 Subject: [PATCH 25/34] Node selector per client rather than per request (#31471) We have made node selectors configurable per request, but all of other language clients don't allow for that. A good reason not to do so, is that having a different node selector per request breaks round-robin. This commit makes NodeSelector configurable only at client initialization. It also improves the docs on this matter, important given that a single node selector can still affect round-robin. --- .../elasticsearch/client/NodeSelector.java | 6 +-- .../elasticsearch/client/RequestOptions.java | 35 ++----------- .../org/elasticsearch/client/RestClient.java | 16 +++--- .../client/RestClientBuilder.java | 14 ++++- .../client/NodeSelectorTests.java | 2 +- .../client/RequestOptionsTests.java | 9 +--- .../RestClientMultipleHostsIntegTests.java | 46 ++++++++--------- .../client/RestClientMultipleHostsTests.java | 33 +++++------- .../client/RestClientSingleHostTests.java | 2 +- .../elasticsearch/client/RestClientTests.java | 4 +- .../RestClientDocumentation.java | 51 +++++++++++++++---- .../low-level/configuration.asciidoc | 27 ++++++++++ docs/java-rest/low-level/usage.asciidoc | 22 ++++---- .../smoketest/DocsClientYamlTestSuiteIT.java | 5 +- .../test/rest/ESRestTestCase.java | 12 +++-- .../rest/yaml/ClientYamlDocsTestClient.java | 11 ++-- .../test/rest/yaml/ClientYamlTestClient.java | 38 ++++++++++---- .../rest/yaml/ESClientYamlSuiteTestCase.java | 13 ++--- .../section/ClientYamlTestSectionTests.java | 4 +- .../smoketest/XDocsClientYamlTestSuiteIT.java | 6 +-- 20 files changed, 208 insertions(+), 148 deletions(-) diff --git a/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java b/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java index 5f5296fe16b13..b3efa08befaf8 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java +++ b/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java @@ -24,7 +24,7 @@ /** * Selects nodes that can receive requests. Used to keep requests away * from master nodes or to send them to nodes with a particular attribute. - * Use with {@link RequestOptions.Builder#setNodeSelector(NodeSelector)}. + * Use with {@link RestClientBuilder#setNodeSelector(NodeSelector)}. 
*/ public interface NodeSelector { /** @@ -68,7 +68,7 @@ public String toString() { * have the {@code master} role OR it has the data {@code data} * role. */ - NodeSelector NOT_MASTER_ONLY = new NodeSelector() { + NodeSelector SKIP_DEDICATED_MASTERS = new NodeSelector() { @Override public void select(Iterable nodes) { for (Iterator itr = nodes.iterator(); itr.hasNext();) { @@ -84,7 +84,7 @@ public void select(Iterable nodes) { @Override public String toString() { - return "NOT_MASTER_ONLY"; + return "SKIP_DEDICATED_MASTERS"; } }; } diff --git a/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java b/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java index 97d150da3d3ff..cf6bd3d49f59e 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java @@ -37,22 +37,18 @@ */ public final class RequestOptions { public static final RequestOptions DEFAULT = new Builder( - Collections.
emptyList(), NodeSelector.ANY, - HeapBufferedResponseConsumerFactory.DEFAULT).build(); + Collections.<Header>
emptyList(), HeapBufferedResponseConsumerFactory.DEFAULT).build(); private final List<Header>
headers; - private final NodeSelector nodeSelector; private final HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory; private RequestOptions(Builder builder) { this.headers = Collections.unmodifiableList(new ArrayList<>(builder.headers)); - this.nodeSelector = builder.nodeSelector; this.httpAsyncResponseConsumerFactory = builder.httpAsyncResponseConsumerFactory; } public Builder toBuilder() { - Builder builder = new Builder(headers, nodeSelector, httpAsyncResponseConsumerFactory); - return builder; + return new Builder(headers, httpAsyncResponseConsumerFactory); } /** @@ -62,14 +58,6 @@ public List
getHeaders() { return headers; } - /** - * The selector that chooses which nodes are valid destinations for - * {@link Request}s with these options. - */ - public NodeSelector getNodeSelector() { - return nodeSelector; - } - /** * The {@link HttpAsyncResponseConsumerFactory} used to create one * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the @@ -93,9 +81,6 @@ public String toString() { b.append(headers.get(h).toString()); } } - if (nodeSelector != NodeSelector.ANY) { - b.append(", nodeSelector=").append(nodeSelector); - } if (httpAsyncResponseConsumerFactory != HttpAsyncResponseConsumerFactory.DEFAULT) { b.append(", consumerFactory=").append(httpAsyncResponseConsumerFactory); } @@ -113,24 +98,20 @@ public boolean equals(Object obj) { RequestOptions other = (RequestOptions) obj; return headers.equals(other.headers) - && nodeSelector.equals(other.nodeSelector) && httpAsyncResponseConsumerFactory.equals(other.httpAsyncResponseConsumerFactory); } @Override public int hashCode() { - return Objects.hash(headers, nodeSelector, httpAsyncResponseConsumerFactory); + return Objects.hash(headers, httpAsyncResponseConsumerFactory); } public static class Builder { private final List
headers; - private NodeSelector nodeSelector; private HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory; - private Builder(List<Header>
headers, NodeSelector nodeSelector, - HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory) { + private Builder(List<Header>
headers, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory) { this.headers = new ArrayList<>(headers); - this.nodeSelector = nodeSelector; this.httpAsyncResponseConsumerFactory = httpAsyncResponseConsumerFactory; } @@ -150,14 +131,6 @@ public void addHeader(String name, String value) { this.headers.add(new ReqHeader(name, value)); } - /** - * Configure the selector that chooses which nodes are valid - * destinations for {@link Request}s with these options - */ - public void setNodeSelector(NodeSelector nodeSelector) { - this.nodeSelector = Objects.requireNonNull(nodeSelector, "nodeSelector cannot be null"); - } - /** * Set the {@link HttpAsyncResponseConsumerFactory} used to create one * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 82039cab5d04c..77c11db455e47 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -48,6 +48,7 @@ import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; import org.elasticsearch.client.DeadHostState.TimeSupplier; +import javax.net.ssl.SSLHandshakeException; import java.io.Closeable; import java.io.IOException; import java.net.ConnectException; @@ -74,7 +75,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import javax.net.ssl.SSLHandshakeException; import static java.util.Collections.singletonList; @@ -108,15 +108,17 @@ public class RestClient implements Closeable { private final AtomicInteger lastNodeIndex = new AtomicInteger(0); private final ConcurrentMap blacklist = new ConcurrentHashMap<>(); private final FailureListener failureListener; + private final NodeSelector nodeSelector; private volatile NodeTuple> nodeTuple; RestClient(CloseableHttpAsyncClient client, long maxRetryTimeoutMillis, Header[] defaultHeaders, - List nodes, String pathPrefix, FailureListener failureListener) { + List nodes, String pathPrefix, FailureListener failureListener, NodeSelector nodeSelector) { this.client = client; this.maxRetryTimeoutMillis = maxRetryTimeoutMillis; this.defaultHeaders = Collections.unmodifiableList(Arrays.asList(defaultHeaders)); this.failureListener = failureListener; this.pathPrefix = pathPrefix; + this.nodeSelector = nodeSelector; setNodes(nodes); } @@ -146,7 +148,7 @@ public static RestClientBuilder builder(HttpHost... hosts) { /** * Replaces the hosts with which the client communicates. 
* - * @deprecated prefer {@link setNodes} because it allows you + * @deprecated prefer {@link #setNodes(Collection)} because it allows you * to set metadata for use with {@link NodeSelector}s */ @Deprecated @@ -180,8 +182,8 @@ private static List hostsToNodes(HttpHost[] hosts) { throw new IllegalArgumentException("hosts must not be null nor empty"); } List nodes = new ArrayList<>(hosts.length); - for (int i = 0; i < hosts.length; i++) { - nodes.add(new Node(hosts[i])); + for (HttpHost host : hosts) { + nodes.add(new Node(host)); } return nodes; } @@ -509,7 +511,7 @@ void performRequestAsyncNoCatch(Request request, ResponseListener listener) thro setHeaders(httpRequest, request.getOptions().getHeaders()); FailureTrackingResponseListener failureTrackingResponseListener = new FailureTrackingResponseListener(listener); long startTime = System.nanoTime(); - performRequestAsync(startTime, nextNode(request.getOptions().getNodeSelector()), httpRequest, ignoreErrorCodes, + performRequestAsync(startTime, nextNode(), httpRequest, ignoreErrorCodes, request.getOptions().getHttpAsyncResponseConsumerFactory(), failureTrackingResponseListener); } @@ -611,7 +613,7 @@ private void setHeaders(HttpRequest httpRequest, Collection
requestHeade * that is closest to being revived. * @throws IOException if no nodes are available */ - private NodeTuple> nextNode(NodeSelector nodeSelector) throws IOException { + private NodeTuple> nextNode() throws IOException { NodeTuple> nodeTuple = this.nodeTuple; List hosts = selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector); return new NodeTuple<>(hosts.iterator(), nodeTuple.authCache); diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java index 17d27248dfea9..fb61f4f17c483 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java @@ -55,6 +55,7 @@ public final class RestClientBuilder { private HttpClientConfigCallback httpClientConfigCallback; private RequestConfigCallback requestConfigCallback; private String pathPrefix; + private NodeSelector nodeSelector = NodeSelector.ANY; /** * Creates a new builder instance and sets the hosts that the client will send requests to. @@ -173,6 +174,16 @@ public RestClientBuilder setPathPrefix(String pathPrefix) { return this; } + /** + * Sets the {@link NodeSelector} to be used for all requests. + * @throws NullPointerException if the provided nodeSelector is null + */ + public RestClientBuilder setNodeSelector(NodeSelector nodeSelector) { + Objects.requireNonNull(nodeSelector, "nodeSelector must not be null"); + this.nodeSelector = nodeSelector; + return this; + } + /** * Creates a new {@link RestClient} based on the provided configuration. */ @@ -186,7 +197,8 @@ public CloseableHttpAsyncClient run() { return createHttpClient(); } }); - RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, nodes, pathPrefix, failureListener); + RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, nodes, + pathPrefix, failureListener, nodeSelector); httpClient.start(); return restClient; } diff --git a/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java b/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java index 868ccdcab757d..83027db325b0b 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java @@ -59,7 +59,7 @@ public void testNotMasterOnly() { Collections.shuffle(nodes, getRandom()); List expected = new ArrayList<>(nodes); expected.remove(masterOnly); - NodeSelector.NOT_MASTER_ONLY.select(nodes); + NodeSelector.SKIP_DEDICATED_MASTERS.select(nodes); assertEquals(expected, nodes); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java index a78be6c126bae..19106792228d9 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java @@ -114,10 +114,6 @@ static RequestOptions.Builder randomBuilder() { } } - if (randomBoolean()) { - builder.setNodeSelector(mock(NodeSelector.class)); - } - if (randomBoolean()) { builder.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(1)); } @@ -131,15 +127,12 @@ private static RequestOptions copy(RequestOptions options) { private static RequestOptions mutate(RequestOptions options) { RequestOptions.Builder mutant = options.toBuilder(); - int 
mutationType = between(0, 2); + int mutationType = between(0, 1); switch (mutationType) { case 0: mutant.addHeader("extra", "m"); return mutant.build(); case 1: - mutant.setNodeSelector(mock(NodeSelector.class)); - return mutant.build(); - case 2: mutant.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(5)); return mutant.build(); default: diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java index 7f5915fe3529d..272859e8441e3 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java @@ -75,14 +75,15 @@ public static void startHttpServer() throws Exception { httpServers[i] = httpServer; httpHosts[i] = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()); } - restClient = buildRestClient(); + restClient = buildRestClient(NodeSelector.ANY); } - private static RestClient buildRestClient() { + private static RestClient buildRestClient(NodeSelector nodeSelector) { RestClientBuilder restClientBuilder = RestClient.builder(httpHosts); if (pathPrefix.length() > 0) { restClientBuilder.setPathPrefix((randomBoolean() ? "/" : "") + pathPrefixWithoutLeadingSlash); } + restClientBuilder.setNodeSelector(nodeSelector); return restClientBuilder.build(); } @@ -199,29 +200,28 @@ public void onFailure(Exception exception) { * test what happens after calling */ public void testNodeSelector() throws IOException { - Request request = new Request("GET", "/200"); - RequestOptions.Builder options = request.getOptions().toBuilder(); - options.setNodeSelector(firstPositionNodeSelector()); - request.setOptions(options); - int rounds = between(1, 10); - for (int i = 0; i < rounds; i++) { - /* - * Run the request more than once to verify that the - * NodeSelector overrides the round robin behavior. - */ - if (stoppedFirstHost) { - try { - restClient.performRequest(request); - fail("expected to fail to connect"); - } catch (ConnectException e) { - // Windows isn't consistent here. Sometimes the message is even null! - if (false == System.getProperty("os.name").startsWith("Windows")) { - assertEquals("Connection refused", e.getMessage()); + try (RestClient restClient = buildRestClient(firstPositionNodeSelector())) { + Request request = new Request("GET", "/200"); + int rounds = between(1, 10); + for (int i = 0; i < rounds; i++) { + /* + * Run the request more than once to verify that the + * NodeSelector overrides the round robin behavior. + */ + if (stoppedFirstHost) { + try { + restClient.performRequest(request); + fail("expected to fail to connect"); + } catch (ConnectException e) { + // Windows isn't consistent here. Sometimes the message is even null! 
+ if (false == System.getProperty("os.name").startsWith("Windows")) { + assertEquals("Connection refused", e.getMessage()); + } } + } else { + Response response = restClient.performRequest(request); + assertEquals(httpHosts[0], response.getHost()); } - } else { - Response response = restClient.performRequest(request); - assertEquals(httpHosts[0], response.getHost()); } } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java index d04b3cbb7554e..e1062076a0dbf 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java @@ -35,9 +35,7 @@ import org.apache.http.message.BasicStatusLine; import org.apache.http.nio.protocol.HttpAsyncRequestProducer; import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; -import org.elasticsearch.client.Node.Roles; import org.junit.After; -import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -74,13 +72,11 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { private ExecutorService exec = Executors.newFixedThreadPool(1); - private RestClient restClient; private List nodes; private HostsTrackingFailureListener failureListener; - @Before @SuppressWarnings("unchecked") - public void createRestClient() throws IOException { + public RestClient createRestClient(NodeSelector nodeSelector) { CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); when(httpClient.execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class), any(HttpClientContext.class), any(FutureCallback.class))).thenAnswer(new Answer>() { @@ -119,7 +115,7 @@ public void run() { } nodes = Collections.unmodifiableList(nodes); failureListener = new HostsTrackingFailureListener(); - restClient = new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener); + return new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener, nodeSelector); } /** @@ -131,12 +127,13 @@ public void shutdownExec() { } public void testRoundRobinOkStatusCodes() throws IOException { + RestClient restClient = createRestClient(NodeSelector.ANY); int numIters = RandomNumbers.randomIntBetween(getRandom(), 1, 5); for (int i = 0; i < numIters; i++) { Set hostsSet = hostsSet(); for (int j = 0; j < nodes.size(); j++) { int statusCode = randomOkStatusCode(getRandom()); - Response response = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode); + Response response = restClient.performRequest(new Request(randomHttpMethod(getRandom()), "/" + statusCode)); assertEquals(statusCode, response.getStatusLine().getStatusCode()); assertTrue("host not found: " + response.getHost(), hostsSet.remove(response.getHost())); } @@ -146,6 +143,7 @@ public void testRoundRobinOkStatusCodes() throws IOException { } public void testRoundRobinNoRetryErrors() throws IOException { + RestClient restClient = createRestClient(NodeSelector.ANY); int numIters = RandomNumbers.randomIntBetween(getRandom(), 1, 5); for (int i = 0; i < numIters; i++) { Set hostsSet = hostsSet(); @@ -153,7 +151,7 @@ public void testRoundRobinNoRetryErrors() throws IOException { String method = randomHttpMethod(getRandom()); int statusCode = randomErrorNoRetryStatusCode(getRandom()); try { - Response response = restClient.performRequest(method, "/" + 
statusCode); + Response response = restClient.performRequest(new Request(method, "/" + statusCode)); if (method.equals("HEAD") && statusCode == 404) { //no exception gets thrown although we got a 404 assertEquals(404, response.getStatusLine().getStatusCode()); @@ -178,9 +176,10 @@ public void testRoundRobinNoRetryErrors() throws IOException { } public void testRoundRobinRetryErrors() throws IOException { + RestClient restClient = createRestClient(NodeSelector.ANY); String retryEndpoint = randomErrorRetryEndpoint(); try { - restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); + restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint)); fail("request should have failed"); } catch (ResponseException e) { /* @@ -237,7 +236,7 @@ public void testRoundRobinRetryErrors() throws IOException { for (int j = 0; j < nodes.size(); j++) { retryEndpoint = randomErrorRetryEndpoint(); try { - restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); + restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint)); fail("request should have failed"); } catch (ResponseException e) { Response response = e.getResponse(); @@ -269,7 +268,7 @@ public void testRoundRobinRetryErrors() throws IOException { int statusCode = randomErrorNoRetryStatusCode(getRandom()); Response response; try { - response = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode); + response = restClient.performRequest(new Request(randomHttpMethod(getRandom()), "/" + statusCode)); } catch (ResponseException e) { response = e.getResponse(); } @@ -286,7 +285,7 @@ public void testRoundRobinRetryErrors() throws IOException { for (int y = 0; y < i + 1; y++) { retryEndpoint = randomErrorRetryEndpoint(); try { - restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); + restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint)); fail("request should have failed"); } catch (ResponseException e) { Response response = e.getResponse(); @@ -323,6 +322,7 @@ public void select(Iterable restClientNodes) { assertTrue(found); } }; + RestClient restClient = createRestClient(firstPositionOnly); int rounds = between(1, 10); for (int i = 0; i < rounds; i++) { /* @@ -330,18 +330,16 @@ public void select(Iterable restClientNodes) { * NodeSelector overrides the round robin behavior. */ Request request = new Request("GET", "/200"); - RequestOptions.Builder options = request.getOptions().toBuilder(); - options.setNodeSelector(firstPositionOnly); - request.setOptions(options); Response response = restClient.performRequest(request); assertEquals(nodes.get(0).getHost(), response.getHost()); } } public void testSetNodes() throws IOException { + RestClient restClient = createRestClient(NodeSelector.SKIP_DEDICATED_MASTERS); List newNodes = new ArrayList<>(nodes.size()); for (int i = 0; i < nodes.size(); i++) { - Roles roles = i == 0 ? new Roles(false, true, true) : new Roles(true, false, false); + Node.Roles roles = i == 0 ? new Node.Roles(false, true, true) : new Node.Roles(true, false, false); newNodes.add(new Node(nodes.get(i).getHost(), null, null, null, roles, null)); } restClient.setNodes(newNodes); @@ -352,9 +350,6 @@ public void testSetNodes() throws IOException { * NodeSelector overrides the round robin behavior. 
*/ Request request = new Request("GET", "/200"); - RequestOptions.Builder options = request.getOptions().toBuilder(); - options.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); - request.setOptions(options); Response response = restClient.performRequest(request); assertEquals(newNodes.get(0).getHost(), response.getHost()); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 5987fe7dd9849..6b7725666d42d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -150,7 +150,7 @@ public void run() { node = new Node(new HttpHost("localhost", 9200)); failureListener = new HostsTrackingFailureListener(); restClient = new RestClient(httpClient, 10000, defaultHeaders, - singletonList(node), null, failureListener); + singletonList(node), null, failureListener, NodeSelector.ANY); } /** diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java index 04742ccab4f32..030c2fca6272a 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java @@ -54,7 +54,7 @@ public class RestClientTests extends RestClientTestCase { public void testCloseIsIdempotent() throws IOException { List nodes = singletonList(new Node(new HttpHost("localhost", 9200))); CloseableHttpAsyncClient closeableHttpAsyncClient = mock(CloseableHttpAsyncClient.class); - RestClient restClient = new RestClient(closeableHttpAsyncClient, 1_000, new Header[0], nodes, null, null); + RestClient restClient = new RestClient(closeableHttpAsyncClient, 1_000, new Header[0], nodes, null, null, null); restClient.close(); verify(closeableHttpAsyncClient, times(1)).close(); restClient.close(); @@ -475,7 +475,7 @@ private String assertSelectAllRejected( NodeTuple> nodeTuple, private static RestClient createRestClient() { List nodes = Collections.singletonList(new Node(new HttpHost("localhost", 9200))); return new RestClient(mock(CloseableHttpAsyncClient.class), randomLongBetween(1_000, 30_000), - new Header[] {}, nodes, null, null); + new Header[] {}, nodes, null, null, null); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java index d3a0202747d25..d347353a1fb55 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java +++ b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java @@ -36,7 +36,6 @@ import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.ssl.SSLContexts; import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.HasAttributeNodeSelector; import org.elasticsearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory; import org.elasticsearch.client.Node; import org.elasticsearch.client.NodeSelector; @@ -54,6 +53,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.security.KeyStore; +import java.util.Iterator; import java.util.concurrent.CountDownLatch; /** @@ -82,8 +82,7 @@ public class RestClientDocumentation { static { RequestOptions.Builder builder = 
RequestOptions.DEFAULT.toBuilder(); builder.addHeader("Authorization", "Bearer " + TOKEN); // <1> - builder.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); // <2> - builder.setHttpAsyncResponseConsumerFactory( // <3> + builder.setHttpAsyncResponseConsumerFactory( // <2> new HeapBufferedResponseConsumerFactory(30 * 1024 * 1024 * 1024)); COMMON_OPTIONS = builder.build(); } @@ -115,6 +114,45 @@ public void testUsage() throws IOException, InterruptedException { builder.setMaxRetryTimeoutMillis(10000); // <1> //end::rest-client-init-max-retry-timeout } + { + //tag::rest-client-init-node-selector + RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + builder.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); // <1> + //end::rest-client-init-node-selector + } + { + //tag::rest-client-init-allocation-aware-selector + RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + builder.setNodeSelector(new NodeSelector() { // <1> + @Override + public void select(Iterable nodes) { + /* + * Prefer any node that belongs to rack_one. If none is around + * we will go to another rack till it's time to try and revive + * some of the nodes that belong to rack_one. + */ + boolean foundOne = false; + for (Node node : nodes) { + String rackId = node.getAttributes().get("rack_id").get(0); + if ("rack_one".equals(rackId)) { + foundOne = true; + break; + } + } + if (foundOne) { + Iterator nodesIt = nodes.iterator(); + while (nodesIt.hasNext()) { + Node node = nodesIt.next(); + String rackId = node.getAttributes().get("rack_id").get(0); + if ("rack_one".equals(rackId) == false) { + nodesIt.remove(); + } + } + } + } + }); + //end::rest-client-init-allocation-aware-selector + } { //tag::rest-client-init-failure-listener RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); @@ -198,13 +236,6 @@ public void onFailure(Exception exception) { request.setOptions(options); //end::rest-client-options-customize-header } - { - //tag::rest-client-options-customize-attribute - RequestOptions.Builder options = COMMON_OPTIONS.toBuilder(); - options.setNodeSelector(new HasAttributeNodeSelector("rack", "c12")); // <1> - request.setOptions(options); - //end::rest-client-options-customize-attribute - } } { HttpEntity[] documents = new HttpEntity[10]; diff --git a/docs/java-rest/low-level/configuration.asciidoc b/docs/java-rest/low-level/configuration.asciidoc index b0753496558bb..0b58c82724b76 100644 --- a/docs/java-rest/low-level/configuration.asciidoc +++ b/docs/java-rest/low-level/configuration.asciidoc @@ -99,3 +99,30 @@ http://docs.oracle.com/javase/8/docs/technotes/guides/net/properties.html[`netwo to your http://docs.oracle.com/javase/8/docs/technotes/guides/security/PolicyFiles.html[Java security policy]. + +=== Node selector + +The client sends each request to one of the configured nodes in round-robin +fashion. Nodes can optionally be filtered through a node selector that needs +to be provided when initializing the client. This is useful when sniffing is +enabled, in case only dedicated master nodes should be hit by HTTP requests. +For each request the client will run the eventually configured node selector +to filter the node candidates, then select the next one in the list out of the +remaining ones. 
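A minimal sketch of the client-level configuration described above, for quick reference; the host and port are placeholders, and `SKIP_DEDICATED_MASTERS` is the selector constant introduced by this change:

["source","java"]
--------------------------------------------------
// build a client whose node selector applies to every request it sends
RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http"));
builder.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); // skip dedicated master-only nodes
RestClient restClient = builder.build(); // round-robin continues over the nodes the selector keeps
--------------------------------------------------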
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-allocation-aware-selector] +-------------------------------------------------- +<1> Set an allocation aware node selector that allows to pick a node in the +local rack if any available, otherwise go to any other node in any rack. It +acts as a preference rather than a strict requirement, given that it goes to +another rack if none of the local nodes are available, rather than returning +no nodes in such case which would make the client forcibly revive a local node +whenever none of the nodes from the preferred rack is available. + +WARNING: Node selectors that do not consistently select the same set of nodes +will make round-robin behaviour unpredictable and possibly unfair. The +preference example above is fine as it reasons about availability of nodes +which already affects the predictability of round-robin. Node selection should +not depend on other external factors or round-robin will not work properly. diff --git a/docs/java-rest/low-level/usage.asciidoc b/docs/java-rest/low-level/usage.asciidoc index 1f8b302715f42..71fadd98988a3 100644 --- a/docs/java-rest/low-level/usage.asciidoc +++ b/docs/java-rest/low-level/usage.asciidoc @@ -196,6 +196,16 @@ include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-failur <1> Set a listener that gets notified every time a node fails, in case actions need to be taken. Used internally when sniffing on failure is enabled. +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-node-selector] +-------------------------------------------------- +<1> Set the node selector to be used to filter the nodes the client will send +requests to among the ones that are set to the client itself. This is useful +for instance to prevent sending requests to dedicated master nodes when +sniffing is enabled. By default the client sends requests to every configured +node. + ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-request-config-callback] @@ -283,8 +293,7 @@ instance and share it between all requests: include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-options-singleton] -------------------------------------------------- <1> Add any headers needed by all requests. -<2> Set a `NodeSelector`. -<3> Customize the response consumer. +<2> Customize the response consumer. `addHeader` is for headers that are required for authorization or to work with a proxy in front of Elasticsearch. There is no need to set the `Content-Type` @@ -315,15 +324,6 @@ adds an extra header: include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-options-customize-header] -------------------------------------------------- -Or you can send requests to nodes with a particular attribute: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-options-customize-attribute] --------------------------------------------------- -<1> Replace the node selector with one that selects nodes on a particular rack. 
- - ==== Multiple parallel asynchronous actions The client is quite happy to execute many actions in parallel. The following diff --git a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 02bc304317e68..a8dd91e8b6de2 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -91,8 +91,9 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion) { + return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion, + restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index df92b101bf1fd..672d19d5dc2a6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -30,7 +30,6 @@ import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; @@ -47,6 +46,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -381,6 +381,11 @@ protected String getProtocol() { protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException { RestClientBuilder builder = RestClient.builder(hosts); + configureClient(builder, settings); + return builder.build(); + } + + protected static void configureClient(RestClientBuilder builder, Settings settings) throws IOException { String keystorePath = settings.get(TRUSTSTORE_PATH); if (keystorePath != null) { final String keystorePass = settings.get(TRUSTSTORE_PASSWORD); @@ -399,11 +404,10 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(keyStore, null).build(); SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(sslcontext); builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy)); - } catch (KeyStoreException|NoSuchAlgorithmException|KeyManagementException|CertificateException e) { + } catch (KeyStoreException |NoSuchAlgorithmException |KeyManagementException |CertificateException e) { throw new RuntimeException("Error setting up ssl", e); } } - try (ThreadContext threadContext = new ThreadContext(settings)) { Header[] defaultHeaders = new Header[threadContext.getHeaders().size()]; int i = 0; @@ -412,7 +416,6 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE 
} builder.setDefaultHeaders(defaultHeaders); } - final String requestTimeoutString = settings.get(CLIENT_RETRY_TIMEOUT); if (requestTimeoutString != null) { final TimeValue maxRetryTimeout = TimeValue.parseTimeValue(requestTimeoutString, CLIENT_RETRY_TIMEOUT); @@ -423,7 +426,6 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE final TimeValue socketTimeout = TimeValue.parseTimeValue(socketTimeoutString, CLIENT_SOCKET_TIMEOUT); builder.setRequestConfigCallback(conf -> conf.setSocketTimeout(Math.toIntExact(socketTimeout.getMillis()))); } - return builder.build(); } @SuppressWarnings("unchecked") diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java index 33443aa5b6e38..ddd5837663521 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java @@ -27,6 +27,8 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import java.io.IOException; @@ -47,8 +49,9 @@ public ClientYamlDocsTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - super(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion, + final CheckedConsumer clientBuilderConsumer) { + super(restSpec, restClient, hosts, esVersion, masterVersion, clientBuilderConsumer); } @Override @@ -66,9 +69,9 @@ public ClientYamlTestResponse callApi(String apiName, Map params request.addParameter(param.getKey(), param.getValue()); } request.setEntity(entity); - setOptions(request, headers, nodeSelector); + setOptions(request, headers); try { - Response response = restClient.performRequest(request); + Response response = getRestClient(nodeSelector).performRequest(request); return new ClientYamlTestResponse(response); } catch (ResponseException e) { throw new ClientYamlTestResponseException(e); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java index 99da661402855..fdc10a1a246e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java @@ -26,18 +26,22 @@ import org.apache.http.util.EntityUtils; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; +import org.elasticsearch.client.Node; import org.elasticsearch.client.NodeSelector; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestPath; import 
org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import java.io.IOException; +import java.io.UncheckedIOException; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; @@ -58,21 +62,24 @@ public class ClientYamlTestClient { private static final ContentType YAML_CONTENT_TYPE = ContentType.create("application/yaml"); private final ClientYamlSuiteRestSpec restSpec; - protected final RestClient restClient; + protected final Map restClients = new HashMap<>(); private final Version esVersion; private final Version masterVersion; + private final CheckedConsumer clientBuilderConsumer; public ClientYamlTestClient( final ClientYamlSuiteRestSpec restSpec, final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { + final Version masterVersion, + final CheckedConsumer clientBuilderConsumer) { assert hosts.size() > 0; this.restSpec = restSpec; - this.restClient = restClient; + this.restClients.put(NodeSelector.ANY, restClient); this.esVersion = esVersion; this.masterVersion = masterVersion; + this.clientBuilderConsumer = clientBuilderConsumer; } public Version getEsVersion() { @@ -172,30 +179,43 @@ public ClientYamlTestResponse callApi(String apiName, Map params requestPath = finalPath.toString(); } - - logger.debug("calling api [{}]", apiName); Request request = new Request(requestMethod, requestPath); for (Map.Entry param : queryStringParams.entrySet()) { request.addParameter(param.getKey(), param.getValue()); } request.setEntity(entity); - setOptions(request, headers, nodeSelector); + setOptions(request, headers); + try { - Response response = restClient.performRequest(request); + Response response = getRestClient(nodeSelector).performRequest(request); return new ClientYamlTestResponse(response); } catch(ResponseException e) { throw new ClientYamlTestResponseException(e); } } - protected static void setOptions(Request request, Map headers, NodeSelector nodeSelector) { + protected RestClient getRestClient(NodeSelector nodeSelector) { + //lazily build a new client in case we need to point to some specific node + return restClients.computeIfAbsent(nodeSelector, selector -> { + RestClient anyClient = restClients.get(NodeSelector.ANY); + RestClientBuilder builder = RestClient.builder(anyClient.getNodes().toArray(new Node[0])); + try { + clientBuilderConsumer.accept(builder); + } catch(IOException e) { + throw new UncheckedIOException(e); + } + builder.setNodeSelector(nodeSelector); + return builder.build(); + }); + } + + protected static void setOptions(Request request, Map headers) { RequestOptions.Builder options = request.getOptions().toBuilder(); for (Map.Entry header : headers.entrySet()) { logger.debug("Adding header {} with value {}", header.getKey(), header.getValue()); options.addHeader(header.getKey(), header.getValue()); } - options.setNodeSelector(nodeSelector); request.setOptions(options); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index c0b5b1e95886c..6afc123520bb0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -47,6 +47,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; 
import java.util.HashSet; import java.util.List; @@ -122,7 +123,7 @@ public static void initializeUseDefaultNumberOfShards() { public void initAndResetContext() throws Exception { if (restTestExecutionContext == null) { // Sniff host metadata in case we need it in the yaml tests - List nodesWithMetadata = sniffHostMetadata(adminClient()); + List nodesWithMetadata = sniffHostMetadata(); client().setNodes(nodesWithMetadata); adminClient().setNodes(nodesWithMetadata); @@ -163,8 +164,9 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion) { + return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, masterVersion, + restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); } /** @@ -195,8 +197,7 @@ public static Iterable createParameters(NamedXContentRegistry executea } //sort the candidates so they will always be in the same order before being shuffled, for repeatability - Collections.sort(tests, - (o1, o2) -> ((ClientYamlTestCandidate)o1[0]).getTestPath().compareTo(((ClientYamlTestCandidate)o2[0]).getTestPath())); + tests.sort(Comparator.comparing(o -> ((ClientYamlTestCandidate) o[0]).getTestPath())); return tests; } @@ -401,7 +402,7 @@ protected boolean randomizeContentType() { /** * Sniff the cluster for host metadata. */ - private List sniffHostMetadata(RestClient client) throws IOException { + private List sniffHostMetadata() throws IOException { ElasticsearchNodesSniffer.Scheme scheme = ElasticsearchNodesSniffer.Scheme.valueOf(getProtocol().toUpperCase(Locale.ROOT)); ElasticsearchNodesSniffer sniffer = new ElasticsearchNodesSniffer( diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java index 87f2d7f9a53f8..5da8601a9f340 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java @@ -73,7 +73,7 @@ public void testAddingDoWithNodeSelectorWithSkip() { section.setSkipSection(new SkipSection(null, singletonList("node_selector"), null)); DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); ApiCallSection apiCall = new ApiCallSection("test"); - apiCall.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); + apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); doSection.setApiCallSection(apiCall); section.addExecutableSection(doSection); } @@ -84,7 +84,7 @@ public void testAddingDoWithNodeSelectorWithSkipButNotWarnings() { section.setSkipSection(new SkipSection(null, singletonList("yaml"), null)); DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); ApiCallSection apiCall = new ApiCallSection("test"); - apiCall.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); + apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); doSection.setApiCallSection(apiCall); Exception e = expectThrows(IllegalArgumentException.class, () -> section.addExecutableSection(doSection)); assertEquals("Attempted to add a [do] with a [node_selector] section without a corresponding" diff --git 
a/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java b/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java index af9fb45b8a0c8..0196406c478cd 100644 --- a/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java +++ b/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.test.rest.XPackRestIT; import org.junit.After; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -58,8 +57,9 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion) { + return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion, + restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); } /** From 3c42bfad4e68c464ee57420f39e390efd2888761 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 22 Jun 2018 17:24:27 +0200 Subject: [PATCH 26/34] Fix Mockito trying to mock IOException that isn't thrown by method (#31433) (#31527) --- .../xpack/monitoring/exporter/http/HttpExporterTests.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java index ff83621119ef6..a96dc8ebb127a 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java @@ -460,7 +460,6 @@ public void testHttpExporter() throws Exception { } } - @AwaitsFix (bugUrl = "https://github.com/elastic/elasticsearch/issues/31433" ) public void testHttpExporterShutdown() throws Exception { final Config config = createConfig(Settings.EMPTY); final RestClient client = mock(RestClient.class); @@ -469,7 +468,7 @@ public void testHttpExporterShutdown() throws Exception { final MultiHttpResource resource = mock(MultiHttpResource.class); if (sniffer != null && rarely()) { - doThrow(randomFrom(new IOException("expected"), new RuntimeException("expected"))).when(sniffer).close(); + doThrow(new RuntimeException("expected")).when(sniffer).close(); } if (rarely()) { From 7313a987f4a8ba1e39d6105f7d74be9186faa95b Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 22 Jun 2018 17:44:13 +0200 Subject: [PATCH 27/34] fix repository update with the same settings but different type (#31458) fix repository update with the same settings but different type --- .../repositories/RepositoriesService.java | 2 +- .../repositories/RepositoriesServiceIT.java | 96 +++++++++++++++++++ 2 files changed, 97 insertions(+), 1 deletion(-) create mode 100644 server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 636e108468e82..d5b2a6413e9a9 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ 
b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -349,7 +349,7 @@ private boolean registerRepository(RepositoryMetaData repositoryMetaData) throws Repository previous = repositories.get(repositoryMetaData.name()); if (previous != null) { RepositoryMetaData previousMetadata = previous.getMetadata(); - if (!previousMetadata.type().equals(repositoryMetaData.type()) && previousMetadata.settings().equals(repositoryMetaData.settings())) { + if (previousMetadata.equals(repositoryMetaData)) { // Previous version is the same as this one - ignore it return false; } diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java new file mode 100644 index 0000000000000..05c9746aa49ac --- /dev/null +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories; + +import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.mockstore.MockRepository; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; + +import java.util.Collection; +import java.util.Collections; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; + +public class RepositoriesServiceIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Collections.singletonList(MockRepository.Plugin.class); + } + + public void testUpdateRepository() { + final InternalTestCluster cluster = internalCluster(); + + final String repositoryName = "test-repo"; + + final Client client = client(); + final RepositoriesService repositoriesService = + cluster.getDataOrMasterNodeInstances(RepositoriesService.class).iterator().next(); + final Settings settings = cluster.getDefaultSettings(); + + final Settings.Builder repoSettings = Settings.builder().put("location", randomRepoPath()); + + assertAcked(client.admin().cluster().preparePutRepository(repositoryName) + .setType(FsRepository.TYPE) + .setSettings(repoSettings) + .get()); + + final GetRepositoriesResponse originalGetRepositoriesResponse = + 
client.admin().cluster().prepareGetRepositories(repositoryName).get(); + + assertThat(originalGetRepositoriesResponse.repositories(), hasSize(1)); + RepositoryMetaData originalRepositoryMetaData = originalGetRepositoriesResponse.repositories().get(0); + + assertThat(originalRepositoryMetaData.type(), equalTo(FsRepository.TYPE)); + + final Repository originalRepository = repositoriesService.repository(repositoryName); + assertThat(originalRepository, instanceOf(FsRepository.class)); + + final boolean updated = randomBoolean(); + final String updatedRepositoryType = updated ? "mock" : FsRepository.TYPE; + + assertAcked(client.admin().cluster().preparePutRepository(repositoryName) + .setType(updatedRepositoryType) + .setSettings(repoSettings) + .get()); + + final GetRepositoriesResponse updatedGetRepositoriesResponse = + client.admin().cluster().prepareGetRepositories(repositoryName).get(); + + assertThat(updatedGetRepositoriesResponse.repositories(), hasSize(1)); + final RepositoryMetaData updatedRepositoryMetaData = updatedGetRepositoriesResponse.repositories().get(0); + + assertThat(updatedRepositoryMetaData.type(), equalTo(updatedRepositoryType)); + + final Repository updatedRepository = repositoriesService.repository(repositoryName); + assertThat(updatedRepository, updated ? not(sameInstance(originalRepository)) : sameInstance(originalRepository)); + } +} From f04c579203299bbbb9397609344b59e6f6f8f18f Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 22 Jun 2018 21:08:11 +0200 Subject: [PATCH 28/34] IndexShard should not return null stats (#31528) IndexShard should not return null stats - empty stats or AlreadyCloseException if it's closed is better --- .../stats/TransportClusterStatsAction.java | 17 ++- .../admin/indices/stats/CommonStats.java | 107 +++++++++--------- .../admin/indices/stats/ShardStats.java | 1 + .../stats/TransportIndicesStatsAction.java | 15 ++- .../elasticsearch/index/shard/IndexShard.java | 16 +-- .../elasticsearch/indices/IndicesService.java | 17 ++- .../index/shard/IndexShardTests.java | 34 ++++++ .../test/InternalTestCluster.java | 10 +- 8 files changed, 148 insertions(+), 69 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 2478045787683..227b1359d4f09 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.cluster.stats; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -36,6 +37,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.CommitStats; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.NodeService; @@ -96,13 +99,23 @@ protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeReq for (IndexShard indexShard : indexService) { if (indexShard.routingEntry() != null && 
indexShard.routingEntry().active()) { // only report on fully started shards + CommitStats commitStats; + SeqNoStats seqNoStats; + try { + commitStats = indexShard.commitStats(); + seqNoStats = indexShard.seqNoStats(); + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine + commitStats = null; + seqNoStats = null; + } shardsStats.add( new ShardStats( indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, SHARD_STATS_FLAGS), - indexShard.commitStats(), - indexShard.seqNoStats())); + commitStats, + seqNoStats)); } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java index e244369c0c312..1bf7342be952c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.stats; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -167,57 +168,61 @@ public CommonStats(CommonStatsFlags flags) { public CommonStats(IndicesQueryCache indicesQueryCache, IndexShard indexShard, CommonStatsFlags flags) { CommonStatsFlags.Flag[] setFlags = flags.getFlags(); for (CommonStatsFlags.Flag flag : setFlags) { - switch (flag) { - case Docs: - docs = indexShard.docStats(); - break; - case Store: - store = indexShard.storeStats(); - break; - case Indexing: - indexing = indexShard.indexingStats(flags.types()); - break; - case Get: - get = indexShard.getStats(); - break; - case Search: - search = indexShard.searchStats(flags.groups()); - break; - case Merge: - merge = indexShard.mergeStats(); - break; - case Refresh: - refresh = indexShard.refreshStats(); - break; - case Flush: - flush = indexShard.flushStats(); - break; - case Warmer: - warmer = indexShard.warmerStats(); - break; - case QueryCache: - queryCache = indicesQueryCache.getStats(indexShard.shardId()); - break; - case FieldData: - fieldData = indexShard.fieldDataStats(flags.fieldDataFields()); - break; - case Completion: - completion = indexShard.completionStats(flags.completionDataFields()); - break; - case Segments: - segments = indexShard.segmentStats(flags.includeSegmentFileSizes()); - break; - case Translog: - translog = indexShard.translogStats(); - break; - case RequestCache: - requestCache = indexShard.requestCache().stats(); - break; - case Recovery: - recoveryStats = indexShard.recoveryStats(); - break; - default: - throw new IllegalStateException("Unknown Flag: " + flag); + try { + switch (flag) { + case Docs: + docs = indexShard.docStats(); + break; + case Store: + store = indexShard.storeStats(); + break; + case Indexing: + indexing = indexShard.indexingStats(flags.types()); + break; + case Get: + get = indexShard.getStats(); + break; + case Search: + search = indexShard.searchStats(flags.groups()); + break; + case Merge: + merge = indexShard.mergeStats(); + break; + case Refresh: + refresh = indexShard.refreshStats(); + break; + case Flush: + flush = indexShard.flushStats(); + break; + case Warmer: + warmer = indexShard.warmerStats(); + break; + case QueryCache: + queryCache = indicesQueryCache.getStats(indexShard.shardId()); + break; + case FieldData: + fieldData = 
indexShard.fieldDataStats(flags.fieldDataFields()); + break; + case Completion: + completion = indexShard.completionStats(flags.completionDataFields()); + break; + case Segments: + segments = indexShard.segmentStats(flags.includeSegmentFileSizes()); + break; + case Translog: + translog = indexShard.translogStats(); + break; + case RequestCache: + requestCache = indexShard.requestCache().stats(); + break; + case Recovery: + recoveryStats = indexShard.recoveryStats(); + break; + default: + throw new IllegalStateException("Unknown Flag: " + flag); + } + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java index 8b41c4bf90c99..898f3d69456b0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java @@ -70,6 +70,7 @@ public CommonStats getStats() { return this.commonStats; } + @Nullable public CommitStats getCommitStats() { return this.commitStats; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 9668a1a41fac5..d09aa58938450 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.stats; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; @@ -33,6 +34,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.CommitStats; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.IndicesService; @@ -100,7 +103,17 @@ protected ShardStats shardOperation(IndicesStatsRequest request, ShardRouting sh } CommonStats commonStats = new CommonStats(indicesService.getIndicesQueryCache(), indexShard, request.flags()); + CommitStats commitStats; + SeqNoStats seqNoStats; + try { + commitStats = indexShard.commitStats(); + seqNoStats = indexShard.seqNoStats(); + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine + commitStats = null; + seqNoStats = null; + } return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), commonStats, - indexShard.commitStats(), indexShard.seqNoStats()); + commitStats, seqNoStats); } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index fb987fe035afa..5bd8f9abc6e04 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -868,21 +868,19 @@ public DocsStats docStats() { } /** - * @return {@link CommitStats} if engine is open, otherwise null + * 
@return {@link CommitStats} + * @throws AlreadyClosedException if shard is closed */ - @Nullable public CommitStats commitStats() { - Engine engine = getEngineOrNull(); - return engine == null ? null : engine.commitStats(); + return getEngine().commitStats(); } /** - * @return {@link SeqNoStats} if engine is open, otherwise null + * @return {@link SeqNoStats} + * @throws AlreadyClosedException if shard is closed */ - @Nullable public SeqNoStats seqNoStats() { - Engine engine = getEngineOrNull(); - return engine == null ? null : engine.getSeqNoStats(replicationTracker.getGlobalCheckpoint()); + return getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint()); } public IndexingStats indexingStats(String... types) { @@ -912,8 +910,6 @@ public StoreStats storeStats() { return store.stats(); } catch (IOException e) { throw new ElasticsearchException("io exception while building 'store stats'", e); - } catch (AlreadyClosedException ex) { - return null; // already closed } } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 5141ca5a0c178..4f535f01da4bf 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -79,6 +79,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.cache.request.ShardRequestCache; +import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -91,6 +92,7 @@ import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; @@ -333,13 +335,24 @@ IndexShardStats indexShardStats(final IndicesService indicesService, final Index return null; } + CommitStats commitStats; + SeqNoStats seqNoStats; + try { + commitStats = indexShard.commitStats(); + seqNoStats = indexShard.seqNoStats(); + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine + commitStats = null; + seqNoStats = null; + } + return new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), - indexShard.commitStats(), - indexShard.seqNoStats()) + commitStats, + seqNoStats) }); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 31afb5ed42fc0..ac52378fc6b9d 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -73,6 +73,7 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; import 
org.elasticsearch.index.engine.EngineTestCase; @@ -88,6 +89,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; @@ -3082,4 +3084,36 @@ public void onShardInactive(IndexShard indexShard) { closeShards(primary); } + public void testOnCloseStats() throws IOException { + final IndexShard indexShard = newStartedShard(true); + + for (int i = 0; i < 3; i++) { + indexDoc(indexShard, "_doc", "" + i, "{\"foo\" : \"" + randomAlphaOfLength(10) + "\"}"); + indexShard.refresh("test"); // produce segments + } + + // check stats on closed and on opened shard + if (randomBoolean()) { + closeShards(indexShard); + + expectThrows(AlreadyClosedException.class, () -> indexShard.seqNoStats()); + expectThrows(AlreadyClosedException.class, () -> indexShard.commitStats()); + expectThrows(AlreadyClosedException.class, () -> indexShard.storeStats()); + + } else { + final SeqNoStats seqNoStats = indexShard.seqNoStats(); + assertThat(seqNoStats.getLocalCheckpoint(), equalTo(2L)); + + final CommitStats commitStats = indexShard.commitStats(); + assertThat(commitStats.getGeneration(), equalTo(2L)); + + final StoreStats storeStats = indexShard.storeStats(); + + assertThat(storeStats.sizeInBytes(), greaterThan(0L)); + + closeShards(indexShard); + } + + } + } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index efe775f7415c2..51c4f4d1e32f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1111,17 +1111,21 @@ private void assertSameSyncIdSameDocs() { IndicesService indexServices = getInstance(IndicesService.class, nodeAndClient.name); for (IndexService indexService : indexServices) { for (IndexShard indexShard : indexService) { - CommitStats commitStats = indexShard.commitStats(); - if (commitStats != null) { // null if the engine is closed or if the shard is recovering + try { + CommitStats commitStats = indexShard.commitStats(); String syncId = commitStats.getUserData().get(Engine.SYNC_COMMIT_ID); if (syncId != null) { long liveDocsOnShard = commitStats.getNumDocs(); if (docsOnShards.get(syncId) != null) { - assertThat("sync id is equal but number of docs does not match on node " + nodeAndClient.name + ". expected " + docsOnShards.get(syncId) + " but got " + liveDocsOnShard, docsOnShards.get(syncId), equalTo(liveDocsOnShard)); + assertThat("sync id is equal but number of docs does not match on node " + + nodeAndClient.name + ". expected " + docsOnShards.get(syncId) + " but got " + + liveDocsOnShard, docsOnShards.get(syncId), equalTo(liveDocsOnShard)); } else { docsOnShards.put(syncId, liveDocsOnShard); } } + } catch (AlreadyClosedException e) { + // the engine is closed or if the shard is recovering } } } From 7a150ec06d5b846caa89520c1388e9c751a0c8af Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 22 Jun 2018 15:03:01 -0700 Subject: [PATCH 29/34] Core: Combine doExecute methods in TransportAction (#31517) TransportAction currently contains 2 doExecute methods, one which takes a the task, and one that does not. 
The latter is what some subclasses implement, while the first one just calls the latter, dropping the given task. This commit combines these methods, in favor of just always assuming a task is present. --- .../noop/action/bulk/TransportNoopBulkAction.java | 3 ++- .../action/search/TransportNoopSearchAction.java | 3 ++- .../ingest/common/GrokProcessorGetAction.java | 3 ++- .../TransportMultiSearchTemplateAction.java | 3 ++- .../mustache/TransportSearchTemplateAction.java | 3 ++- .../painless/PainlessExecuteAction.java | 3 ++- .../index/rankeval/TransportRankEvalAction.java | 3 ++- .../index/reindex/TransportDeleteByQueryAction.java | 5 ----- .../index/reindex/TransportReindexAction.java | 5 ----- .../index/reindex/TransportUpdateByQueryAction.java | 5 ----- .../node/tasks/get/TransportGetTaskAction.java | 5 ----- .../cluster/remote/TransportRemoteInfoAction.java | 3 ++- .../indices/flush/TransportSyncedFlushAction.java | 3 ++- .../get/TransportGetFieldMappingsAction.java | 3 ++- .../action/bulk/TransportBulkAction.java | 5 ----- .../action/explain/TransportExplainAction.java | 5 +++-- .../fieldcaps/TransportFieldCapabilitiesAction.java | 4 ++-- .../action/get/TransportMultiGetAction.java | 3 ++- .../ingest/SimulatePipelineTransportAction.java | 3 ++- .../action/main/TransportMainAction.java | 3 ++- .../action/search/TransportClearScrollAction.java | 3 ++- .../action/search/TransportMultiSearchAction.java | 3 ++- .../action/search/TransportSearchAction.java | 5 ----- .../action/search/TransportSearchScrollAction.java | 4 ---- .../action/support/TransportAction.java | 6 +----- .../support/broadcast/TransportBroadcastAction.java | 5 ----- .../node/TransportBroadcastByNodeAction.java | 5 ----- .../support/master/TransportMasterNodeAction.java | 6 ------ .../action/support/nodes/TransportNodesAction.java | 6 ------ .../TransportBroadcastReplicationAction.java | 6 ------ .../replication/TransportReplicationAction.java | 5 ----- .../TransportInstanceSingleOperationAction.java | 2 +- .../single/shard/TransportSingleShardAction.java | 2 +- .../action/support/tasks/TransportTasksAction.java | 6 ------ .../TransportMultiTermVectorsAction.java | 3 ++- .../action/update/TransportUpdateAction.java | 13 +++++++------ .../org/elasticsearch/action/ActionModuleTests.java | 3 ++- .../elasticsearch/action/main/MainActionTests.java | 3 ++- .../action/search/MultiSearchActionTookTests.java | 3 ++- .../support/TransportActionFilterChainTests.java | 4 ++-- .../client/node/NodeClientHeadersTests.java | 3 ++- .../xpack/core/action/TransportXPackInfoAction.java | 3 ++- .../action/TransportGetCertificateInfoAction.java | 3 ++- .../core/action/TransportXPackInfoActionTests.java | 3 ++- .../graph/action/TransportGraphExploreAction.java | 3 ++- .../ml/action/TransportDeleteCalendarAction.java | 3 ++- .../action/TransportDeleteCalendarEventAction.java | 4 +++- .../ml/action/TransportDeleteExpiredDataAction.java | 4 +++- .../ml/action/TransportDeleteFilterAction.java | 3 ++- .../action/TransportDeleteModelSnapshotAction.java | 4 +++- .../xpack/ml/action/TransportGetBucketsAction.java | 3 ++- .../ml/action/TransportGetCalendarEventsAction.java | 3 ++- .../ml/action/TransportGetCalendarsAction.java | 3 ++- .../ml/action/TransportGetCategoriesAction.java | 3 ++- .../xpack/ml/action/TransportGetFiltersAction.java | 3 ++- .../ml/action/TransportGetInfluencersAction.java | 3 ++- .../ml/action/TransportGetModelSnapshotsAction.java | 4 +++- .../ml/action/TransportGetOverallBucketsAction.java | 4 +++- 
.../xpack/ml/action/TransportGetRecordsAction.java | 3 ++- .../xpack/ml/action/TransportMlInfoAction.java | 3 ++- .../action/TransportPostCalendarEventsAction.java | 3 ++- .../ml/action/TransportPreviewDatafeedAction.java | 3 ++- .../xpack/ml/action/TransportPutCalendarAction.java | 3 ++- .../xpack/ml/action/TransportPutFilterAction.java | 3 ++- .../ml/action/TransportUpdateCalendarJobAction.java | 3 ++- .../ml/action/TransportUpdateFilterAction.java | 3 ++- .../action/TransportUpdateModelSnapshotAction.java | 4 +++- .../ml/action/TransportValidateDetectorAction.java | 3 ++- .../ml/action/TransportValidateJobConfigAction.java | 4 +++- .../action/TransportMonitoringBulkAction.java | 3 ++- .../rollup/action/TransportGetRollupCapsAction.java | 3 ++- .../rollup/action/TransportRollupSearchAction.java | 2 +- .../action/role/TransportDeleteRoleAction.java | 3 ++- .../action/role/TransportGetRolesAction.java | 3 ++- .../action/role/TransportPutRoleAction.java | 3 ++- .../TransportDeleteRoleMappingAction.java | 4 ++-- .../rolemapping/TransportGetRoleMappingsAction.java | 4 ++-- .../rolemapping/TransportPutRoleMappingAction.java | 4 ++-- .../saml/TransportSamlAuthenticateAction.java | 4 ++-- .../saml/TransportSamlInvalidateSessionAction.java | 4 ++-- .../action/saml/TransportSamlLogoutAction.java | 4 ++-- .../TransportSamlPrepareAuthenticationAction.java | 5 +++-- .../action/token/TransportCreateTokenAction.java | 3 ++- .../token/TransportInvalidateTokenAction.java | 3 ++- .../action/token/TransportRefreshTokenAction.java | 3 ++- .../action/user/TransportAuthenticateAction.java | 3 ++- .../action/user/TransportChangePasswordAction.java | 3 ++- .../action/user/TransportDeleteUserAction.java | 3 ++- .../action/user/TransportGetUsersAction.java | 3 ++- .../action/user/TransportHasPrivilegesAction.java | 3 ++- .../action/user/TransportPutUserAction.java | 3 ++- .../action/user/TransportSetEnabledAction.java | 3 ++- .../action/role/TransportDeleteRoleActionTests.java | 7 ++++--- .../action/role/TransportGetRolesActionTests.java | 9 +++++---- .../action/role/TransportPutRoleActionTests.java | 7 ++++--- .../TransportGetRoleMappingsActionTests.java | 7 ++++--- .../TransportPutRoleMappingActionTests.java | 3 ++- .../TransportSamlInvalidateSessionActionTests.java | 3 ++- .../action/saml/TransportSamlLogoutActionTests.java | 3 ++- .../user/TransportAuthenticateActionTests.java | 7 ++++--- .../user/TransportChangePasswordActionTests.java | 9 +++++---- .../action/user/TransportDeleteUserActionTests.java | 11 ++++++----- .../action/user/TransportGetUsersActionTests.java | 13 +++++++------ .../user/TransportHasPrivilegesActionTests.java | 9 +++++---- .../action/user/TransportPutUserActionTests.java | 11 ++++++----- .../action/user/TransportSetEnabledActionTests.java | 11 ++++++----- .../sql/plugin/TransportSqlClearCursorAction.java | 3 ++- .../xpack/sql/plugin/TransportSqlQueryAction.java | 3 ++- .../sql/plugin/TransportSqlTranslateAction.java | 3 ++- .../transport/actions/WatcherTransportAction.java | 6 ++++-- .../actions/delete/TransportDeleteWatchAction.java | 3 ++- 111 files changed, 240 insertions(+), 224 deletions(-) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java index 0f6748b5e826c..b9520e667be67 100644 --- 
a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; public class TransportNoopBulkAction extends HandledTransportAction { @@ -42,7 +43,7 @@ public TransportNoopBulkAction(Settings settings, TransportService transportServ } @Override - protected void doExecute(BulkRequest request, ActionListener listener) { + protected void doExecute(Task task, BulkRequest request, ActionListener listener) { final int itemCount = request.requests().size(); // simulate at least a realistic amount of data that gets serialized BulkItemResponse[] bulkItemResponses = new BulkItemResponse[itemCount]; diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java index 040c2d5f52d1d..099b5a3a8b07c 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileShardResults; import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.Collections; @@ -44,7 +45,7 @@ public TransportNoopSearchAction(Settings settings, TransportService transportSe } @Override - protected void doExecute(SearchRequest request, ActionListener listener) { + protected void doExecute(Task task, SearchRequest request, ActionListener listener) { listener.onResponse(new SearchResponse(new InternalSearchResponse( new SearchHits( new SearchHit[0], 0L, 0.0f), diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 85a8f5e48079c..2fae5d77bcce3 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -41,6 +41,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -118,7 +119,7 @@ public TransportAction(Settings settings, TransportService transportService, Act } @Override - protected void doExecute(Request request, ActionListener listener) { + protected void doExecute(Task task, Request request, ActionListener listener) { try { listener.onResponse(new Response(GROK_PATTERNS)); } catch (Exception e) { diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java 
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java index 79fea3c6d62cd..6e0baed9be879 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; @@ -54,7 +55,7 @@ public TransportMultiSearchTemplateAction(Settings settings, TransportService tr } @Override - protected void doExecute(MultiSearchTemplateRequest request, ActionListener listener) { + protected void doExecute(Task task, MultiSearchTemplateRequest request, ActionListener listener) { List originalSlots = new ArrayList<>(); MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); multiSearchRequest.indicesOptions(request.indicesOptions()); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index 45de41f51a3d2..2f880b56dc005 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -63,7 +64,7 @@ public TransportSearchTemplateAction(Settings settings, TransportService transpo } @Override - protected void doExecute(SearchTemplateRequest request, ActionListener listener) { + protected void doExecute(Task task, SearchTemplateRequest request, ActionListener listener) { final SearchTemplateResponse response = new SearchTemplateResponse(); try { SearchRequest searchRequest = convert(request, response, scriptService, xContentRegistry); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 1bfd013b0d5a5..01139f6cf2e70 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -48,6 +48,7 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -285,7 +286,7 @@ public TransportAction(Settings settings, TransportService transportService, this.scriptService = scriptService; } @Override - protected void doExecute(Request request, ActionListener listener) { + protected void doExecute(Task task, Request request, ActionListener listener) { switch (request.context) { case PAINLESS_TEST: PainlessTestScript.Factory factory = scriptService.compile(request.script, PainlessTestScript.CONTEXT); diff 
--git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java index 81e9b5cf42c4c..80d3d674aed3b 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java @@ -40,6 +40,7 @@ import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -83,7 +84,7 @@ public TransportRankEvalAction(Settings settings, ActionFilters actionFilters, C } @Override - protected void doExecute(RankEvalRequest request, ActionListener listener) { + protected void doExecute(Task task, RankEvalRequest request, ActionListener listener) { RankEvalSpec evaluationSpecification = request.getRankEvalSpec(); EvaluationMetric metric = evaluationSpecification.getMetric(); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index 9be54f4f76104..c1defe56adc6f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -67,9 +67,4 @@ public void doExecute(Task task, DeleteByQueryRequest request, ActionListener listener) { - throw new UnsupportedOperationException("task required"); - } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 62be1e2cb613a..e54b5f50ae674 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -134,11 +134,6 @@ protected void doExecute(Task task, ReindexRequest request, ActionListener listener) { - throw new UnsupportedOperationException("task required"); - } - static void checkRemoteWhitelist(CharacterRunAutomaton whitelist, RemoteInfo remoteInfo) { if (remoteInfo == null) { return; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index c497374d944e8..34ae3fdd0c62f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -78,11 +78,6 @@ protected void doExecute(Task task, UpdateByQueryRequest request, ActionListener ); } - @Override - protected void doExecute(UpdateByQueryRequest request, ActionListener listener) { - throw new UnsupportedOperationException("task required"); - } - /** * Simple implementation of update-by-query using scrolling and bulk. 
*/ diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java index 9e841b97e7e07..927d2e47680c5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java @@ -81,11 +81,6 @@ public TransportGetTaskAction(Settings settings, ThreadPool threadPool, Transpor this.xContentRegistry = xContentRegistry; } - @Override - protected void doExecute(GetTaskRequest request, ActionListener listener) { - throw new UnsupportedOperationException("Task is required"); - } - @Override protected void doExecute(Task thisTask, GetTaskRequest request, ActionListener listener) { if (clusterService.localNode().getId().equals(request.getTaskId().getNodeId())) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java index edf8eae187345..743a35998355c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java @@ -22,6 +22,7 @@ import java.util.function.Supplier; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.support.ActionFilters; @@ -45,7 +46,7 @@ public TransportRemoteInfoAction(Settings settings, TransportService transportSe } @Override - protected void doExecute(RemoteInfoRequest remoteInfoRequest, ActionListener listener) { + protected void doExecute(Task task, RemoteInfoRequest remoteInfoRequest, ActionListener listener) { listener.onResponse(new RemoteInfoResponse(remoteClusterService.getRemoteConnectionInfos().collect(toList()))); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java index 9762fe6cbb814..1ab46bfd926c6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.flush.SyncedFlushService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; /** @@ -45,7 +46,7 @@ public TransportSyncedFlushAction(Settings settings, TransportService transportS } @Override - protected void doExecute(SyncedFlushRequest request, ActionListener listener) { + protected void doExecute(Task task, SyncedFlushRequest request, ActionListener listener) { syncedFlushService.attemptSyncedFlush(request.indices(), request.indicesOptions(), listener); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java index bf61fc5e8633f..cf2ba48dc8771 
100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -53,7 +54,7 @@ public TransportGetFieldMappingsAction(Settings settings, TransportService trans } @Override - protected void doExecute(GetFieldMappingsRequest request, final ActionListener listener) { + protected void doExecute(Task task, GetFieldMappingsRequest request, final ActionListener listener) { ClusterState clusterState = clusterService.state(); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); final AtomicInteger indexCounter = new AtomicInteger(); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 3ac75eb5869d7..247970dafcee3 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -123,11 +123,6 @@ public TransportBulkAction(Settings settings, ThreadPool threadPool, TransportSe clusterService.addStateApplier(this.ingestForwarder); } - @Override - protected final void doExecute(final BulkRequest bulkRequest, final ActionListener listener) { - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener listener) { if (bulkRequest.hasIndexRequestsWithPipelines()) { diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 18c1ea41e95b9..5ea178f595acf 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -44,6 +44,7 @@ import org.elasticsearch.search.internal.ShardSearchLocalRequest; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.rescore.Rescorer; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -67,9 +68,9 @@ public TransportExplainAction(Settings settings, ThreadPool threadPool, ClusterS } @Override - protected void doExecute(ExplainRequest request, ActionListener listener) { + protected void doExecute(Task task, ExplainRequest request, ActionListener listener) { request.nowInMillis = System.currentTimeMillis(); - super.doExecute(request, listener); + super.doExecute(task, request, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index f86d0f1d273f0..ef0d19a265583 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -31,6 +31,7 @@ import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteClusterService; @@ -63,8 +64,7 @@ public TransportFieldCapabilitiesAction(Settings settings, TransportService tran } @Override - protected void doExecute(FieldCapabilitiesRequest request, - final ActionListener listener) { + protected void doExecute(Task task, FieldCapabilitiesRequest request, final ActionListener listener) { final ClusterState clusterState = clusterService.state(); final Map remoteClusterIndices = remoteClusterService.groupIndices(request.indicesOptions(), request.indices(), idx -> indexNameExpressionResolver.hasIndexOrAlias(idx, clusterState)); diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java index f7ad0f6c87fd0..d7770148c95a9 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -53,7 +54,7 @@ public TransportMultiGetAction(Settings settings, TransportService transportServ } @Override - protected void doExecute(final MultiGetRequest request, final ActionListener listener) { + protected void doExecute(Task task, final MultiGetRequest request, final ActionListener listener) { ClusterState clusterState = clusterService.state(); clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java index 599d3a3d60f5a..2e898c1895f9a 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.node.NodeService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -48,7 +49,7 @@ public SimulatePipelineTransportAction(Settings settings, ThreadPool threadPool, } @Override - protected void doExecute(SimulatePipelineRequest request, ActionListener listener) { + protected void doExecute(Task task, SimulatePipelineRequest request, ActionListener listener) { final Map source = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2(); final SimulatePipelineRequest.Parsed simulateRequest; diff --git a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java index 48612a68901dd..d3a54bf7e45ba 100644 --- a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; public class TransportMainAction extends HandledTransportAction { @@ -44,7 +45,7 @@ public TransportMainAction(Settings settings, TransportService transportService, } @Override - protected void doExecute(MainRequest request, ActionListener listener) { + protected void doExecute(Task task, MainRequest request, ActionListener listener) { ClusterState clusterState = clusterService.state(); assert Node.NODE_NAME_SETTING.exists(settings); final boolean available = clusterState.getBlocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE) == false; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java index fe5ab3d9a379c..f1c9fd5c545fb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; public class TransportClearScrollAction extends HandledTransportAction { @@ -43,7 +44,7 @@ public TransportClearScrollAction(Settings settings, TransportService transportS } @Override - protected void doExecute(ClearScrollRequest request, final ActionListener listener) { + protected void doExecute(Task task, ClearScrollRequest request, final ActionListener listener) { Runnable runnable = new ClearScrollController(request, listener, clusterService.state().nodes(), logger, searchTransportService); runnable.run(); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index df3214af8ea69..b771a135d5f29 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -70,7 +71,7 @@ public TransportMultiSearchAction(Settings settings, ThreadPool threadPool, Tran } @Override - protected void doExecute(MultiSearchRequest request, ActionListener listener) { + protected void doExecute(Task task, MultiSearchRequest request, ActionListener listener) { final long relativeStartTime = relativeTimeProvider.getAsLong(); ClusterState clusterState = clusterService.state(); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 1b9a8353253d1..5c0b2eb39ed51 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -362,11 +362,6 @@ static GroupShardsIterator mergeShardsIterators(GroupShards return new GroupShardsIterator<>(shards); } - @Override - protected final void doExecute(SearchRequest searchRequest, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required"); - } - private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest, GroupShardsIterator shardIterators, SearchTimeProvider timeProvider, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 953152eaad003..70a50d44fb0e6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -50,10 +50,6 @@ public TransportSearchScrollAction(Settings settings, TransportService transport this.searchPhaseController = searchPhaseController; } - @Override - protected final void doExecute(SearchScrollRequest request, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required"); - } @Override protected void doExecute(Task task, SearchScrollRequest request, ActionListener listener) { try { diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java index 85167cfe0f8e9..9db5bfd84b5e3 100644 --- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -123,11 +123,7 @@ public final void execute(Task task, Request request, ActionListener l requestFilterChain.proceed(task, actionName, request, listener); } - protected void doExecute(Task task, Request request, ActionListener listener) { - doExecute(request, listener); - } - - protected abstract void doExecute(Request request, ActionListener listener); + protected abstract void doExecute(Task task, Request request, ActionListener listener); private static class RequestFilterChain implements ActionFilterChain { diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index 1bec46fd1213e..45a65a31390e6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -75,11 +75,6 @@ protected void doExecute(Task task, Request request, ActionListener li new AsyncBroadcastAction(task, request, listener).start(); } - @Override - protected final void doExecute(Request request, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - protected abstract Response newResponse(Request request, AtomicReferenceArray shardsResponses, ClusterState clusterState); protected abstract ShardRequest newShardRequest(int numShards, ShardRouting shard, Request request); diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index 
348162b8c33bd..9079238b7b62e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -221,11 +221,6 @@ private Response newResponse( */ protected abstract ClusterBlockException checkRequestBlock(ClusterState state, Request request, String[] concreteIndices); - @Override - protected final void doExecute(Request request, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, Request request, ActionListener listener) { new AsyncAction(task, request, listener).start(); diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 8f198c4b82e6f..934241a8fcb58 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -115,12 +115,6 @@ protected boolean localExecute(Request request) { protected abstract ClusterBlockException checkBlock(Request request, ClusterState state); - @Override - protected final void doExecute(final Request request, ActionListener listener) { - logger.warn("attempt to execute a master node operation without task"); - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, final Request request, ActionListener listener) { new AsyncSingleAction(task, request, listener).start(); diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 6a9ac53f7bebd..b232d849223b9 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -77,12 +77,6 @@ protected TransportNodesAction(Settings settings, String actionName, ThreadPool transportNodeAction, nodeRequest, nodeExecutor, new NodeTransportHandler()); } - @Override - protected final void doExecute(NodesRequest request, ActionListener listener) { - logger.warn("attempt to execute a transport nodes operation without a task"); - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, NodesRequest request, ActionListener listener) { new AsyncAction(task, request, listener).start(); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index 50e0cc3af7f7b..aa3784efdd04f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -66,12 +66,6 @@ public TransportBroadcastReplicationAction(String name, Supplier reques this.indexNameExpressionResolver = indexNameExpressionResolver; } - - @Override - protected final void doExecute(final Request request, final ActionListener listener) { - 
throw new UnsupportedOperationException("the task parameter is required for this operation");
- }
-
@Override
protected void doExecute(Task task, Request request, ActionListener listener) {
final ClusterState clusterState = clusterService.state();
diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
index c31ee81a802a5..53d9752f4edc6 100644
--- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
@@ -163,11 +163,6 @@ protected void registerRequestHandlers(String actionName, TransportService trans
new ReplicaOperationTransportHandler());
}
- @Override
- protected final void doExecute(Request request, ActionListener listener) {
- throw new UnsupportedOperationException("the task parameter is required for this operation");
- }
-
@Override
protected void doExecute(Task task, Request request, ActionListener listener) {
new ReroutePhase((ReplicationTask) task, request, listener).run();
diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java
index 2d8ccb6e524f4..e8e710aa81f2c 100644
--- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java
@@ -74,7 +74,7 @@ protected TransportInstanceSingleOperationAction(Settings settings, String actio
}
@Override
- protected void doExecute(Request request, ActionListener listener) {
+ protected void doExecute(Task task, Request request, ActionListener listener) {
new AsyncSingleAction(request, listener).start();
diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
index 7116061640f3e..7a83b0c455da4 100644
--- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
@@ -97,7 +97,7 @@ protected boolean isSubAction() {
}
@Override
- protected void doExecute(Request request, ActionListener listener) {
+ protected void doExecute(Task task, Request request, ActionListener listener) {
new AsyncSingleAction(request, listener).start();
diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
index ee116d9f957c6..38a0d96600ce8 100644
--- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
@@ -90,12 +90,6 @@ protected TransportTasksAction(Settings settings, String actionName, ClusterServ
transportService.registerRequestHandler(transportNodeAction, NodeTaskRequest::new, nodeExecutor, new NodeTransportHandler());
}
- @Override
- protected final void doExecute(TasksRequest request, ActionListener listener) {
- logger.warn("attempt to execute a transport tasks operation without a task");
- throw new UnsupportedOperationException("task parameter is required for this operation");
- }
-
@Override
protected void doExecute(Task task, TasksRequest request, ActionListener listener) {
new AsyncAction(task, request, listener).start();
diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java
index f66d843ea6db4..b7ee052b2ba82 100644
--- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java
@@ -31,6 +31,7 @@
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import java.util.HashMap;
@@ -54,7 +55,7 @@ public TransportMultiTermVectorsAction(Settings settings, TransportService trans
}
@Override
- protected void doExecute(final MultiTermVectorsRequest request, final ActionListener listener) {
+ protected void doExecute(Task task, final MultiTermVectorsRequest request, final ActionListener listener) {
ClusterState clusterState = clusterService.state();
clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ);
diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
index 9faf22d464cbb..299a2ce812396 100644
--- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
+++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
@@ -53,6 +53,7 @@
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -111,13 +112,13 @@ public static void resolveAndValidateRouting(MetaData metaData, String concreteI
}
@Override
- protected void doExecute(final UpdateRequest request, final ActionListener listener) {
+ protected void doExecute(Task task, final UpdateRequest request, final ActionListener listener) {
// if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API
if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) {
client.admin().indices().create(new CreateIndexRequest().index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener() {
@Override
public void onResponse(CreateIndexResponse result) {
- innerExecute(request, listener);
+ innerExecute(task, request, listener);
}
@Override
@@ -125,7 +126,7 @@ public void onFailure(Exception e) {
if (unwrapCause(e) instanceof ResourceAlreadyExistsException) {
// we have the index, do it
try {
- innerExecute(request, listener);
+ innerExecute(task, request, listener);
} catch (Exception inner) {
inner.addSuppressed(e);
listener.onFailure(inner);
@@ -136,12 +137,12 @@ public void onFailure(Exception e) {
}
});
} else {
- innerExecute(request, listener);
+ innerExecute(task, request, listener);
}
}
- private void innerExecute(final UpdateRequest request, final ActionListener listener) {
- super.doExecute(request, listener);
+ private void innerExecute(final Task task, final UpdateRequest request, final ActionListener listener) {
+ super.doExecute(task, request, listener);
}
@Override
diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
index 1767358eca8e4..1fa4197e74900 100644
--- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
+++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java
@@ -39,6 +39,7 @@
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestRequest.Method;
import org.elasticsearch.rest.action.RestMainAction;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
@@ -84,7 +85,7 @@ protected FakeTransportAction(Settings settings, String actionName, ActionFilter
}
@Override
- protected void doExecute(FakeRequest request, ActionListener listener) {
+ protected void doExecute(Task task, FakeRequest request, ActionListener listener) {
}
}
class FakeAction extends Action {
diff --git a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java
index 654a4a3649c35..2c2694116b216 100644
--- a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java
@@ -29,6 +29,7 @@
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.TransportService;
@@ -69,7 +70,7 @@ public void testMainActionClusterAvailable() {
x -> null, null, Collections.emptySet());
TransportMainAction action = new TransportMainAction(settings, transportService, mock(ActionFilters.class), clusterService);
AtomicReference responseRef = new AtomicReference<>();
- action.doExecute(new MainRequest(), new ActionListener() {
+ action.doExecute(mock(Task.class), new MainRequest(), new ActionListener() {
@Override
public void onResponse(MainResponse mainResponse) {
responseRef.set(mainResponse);
diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java
index 94bc6b01ec168..fc3fb34a6cb19 100644
--- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java
@@ -32,6 +32,7 @@
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AtomicArray;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
@@ -106,7 +107,7 @@ private void runTestTook(boolean controlledClock) throws Exception {
TransportMultiSearchAction action = createTransportMultiSearchAction(controlledClock, expected);
- action.doExecute(multiSearchRequest, new ActionListener() {
+ action.doExecute(mock(Task.class), multiSearchRequest, new ActionListener() {
@Override
public void onResponse(MultiSearchResponse multiSearchResponse) {
if (controlledClock) {
diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
index 479ed2ad60d51..9df73c8c95543 100644
--- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
+++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
@@ -83,7 +83,7 @@ public void testActionFiltersRequest() throws ExecutionException, InterruptedExc
new TransportAction(Settings.EMPTY, actionName, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) {
@Override
- protected void doExecute(TestRequest request, ActionListener listener) {
+ protected void doExecute(Task task, TestRequest request, ActionListener listener) {
listener.onResponse(new TestResponse());
}
};
@@ -160,7 +160,7 @@ public void exe
TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) {
@Override
- protected void doExecute(TestRequest request, ActionListener listener) {
+ protected void doExecute(Task task, TestRequest request, ActionListener listener) {
listener.onResponse(new TestResponse());
}
};
diff --git a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java
index f473188a5424b..a689de9a5d324 100644
--- a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java
+++ b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java
@@ -28,6 +28,7 @@
import org.elasticsearch.client.AbstractClientHeadersTestCase;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.threadpool.ThreadPool;
@@ -63,7 +64,7 @@ private InternalTransportAction(Settings settings, String actionName, ThreadPool
}
@Override
- protected void doExecute(ActionRequest request, ActionListener listener) {
+ protected void doExecute(Task task, ActionRequest request, ActionListener listener) {
listener.onFailure(new InternalException(actionName));
}
}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java
index 23dd0e12d44ff..b149fa300832b 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java
@@ -15,6 +15,7 @@
import org.elasticsearch.license.XPackInfoResponse;
import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet;
import org.elasticsearch.license.XPackInfoResponse.LicenseInfo;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.XPackBuild;
import org.elasticsearch.xpack.core.XPackFeatureSet;
@@ -37,7 +38,7 @@ public TransportXPackInfoAction(Settings settings, TransportService transportSer
}
@Override
- protected void doExecute(XPackInfoRequest request, ActionListener listener) {
+ protected void doExecute(Task task, XPackInfoRequest request, ActionListener listener) {
XPackInfoResponse.BuildInfo buildInfo = null;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java
index a70d0693d5b37..9337f7f6b0c22 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ssl.SSLService;
import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo;
@@ -32,7 +33,7 @@ public TransportGetCertificateInfoAction(Settings settings, TransportService tra
}
@Override
- protected void doExecute(GetCertificateInfoAction.Request request,
+ protected void doExecute(Task task, GetCertificateInfoAction.Request request,
ActionListener listener) {
try {
Collection certificates = sslService.getLoadedCertificates();
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java
index e17f7a48cbfeb..01991670d5565 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java
@@ -12,6 +12,7 @@
import org.elasticsearch.license.LicenseService;
import org.elasticsearch.license.XPackInfoResponse;
import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.XPackFeatureSet;
@@ -82,7 +83,7 @@ public void testDoExecute() throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
final AtomicReference response = new AtomicReference<>();
final AtomicReference error = new AtomicReference<>();
- action.doExecute(request, new ActionListener() {
+ action.doExecute(mock(Task.class), request, new ActionListener() {
@Override
public void onResponse(XPackInfoResponse infoResponse) {
response.set(infoResponse);
diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java
index d45dd640a49ff..4eb136040e988 100644
--- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java
+++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java
@@ -35,6 +35,7 @@
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.XPackField;
@@ -92,7 +93,7 @@ public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, Nod
}
@Override
- protected void doExecute(GraphExploreRequest request, ActionListener listener) {
+ protected void doExecute(Task task, GraphExploreRequest request, ActionListener listener) {
if (licenseState.isGraphAllowed()) {
new AsyncGraphAction(request, listener).start();
} else {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java
index 38d88341ce3de..9c712efe693ca 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java
@@ -18,6 +18,7 @@
import org.elasticsearch.index.reindex.DeleteByQueryAction;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction;
@@ -47,7 +48,7 @@ public TransportDeleteCalendarAction(Settings settings, TransportService transpo
}
@Override
- protected void doExecute(DeleteCalendarAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, DeleteCalendarAction.Request request, ActionListener listener) {
final String calendarId = request.getCalendarId();
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java
index 7b2311eba2d2d..52896751de1d3 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java
@@ -19,6 +19,7 @@
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.action.DeleteCalendarEventAction;
@@ -50,7 +51,8 @@ public TransportDeleteCalendarEventAction(Settings settings, TransportService tr
}
@Override
- protected void doExecute(DeleteCalendarEventAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, DeleteCalendarEventAction.Request request,
+ ActionListener listener) {
final String eventId = request.getEventId();
ActionListener calendarListener = ActionListener.wrap(
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java
index b9ff2cb98b5d6..f7dfb8adb9e2b 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java
@@ -12,6 +12,7 @@
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ClientHelper;
@@ -46,7 +47,8 @@ public TransportDeleteExpiredDataAction(Settings settings, ThreadPool threadPool
}
@Override
- protected void doExecute(DeleteExpiredDataAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, DeleteExpiredDataAction.Request request,
+ ActionListener listener) {
logger.info("Deleting expired data");
threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> deleteExpiredData(listener));
}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java
index 79693e2279486..c7d3d64c58cea 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java
@@ -20,6 +20,7 @@
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata;
@@ -52,7 +53,7 @@ public TransportDeleteFilterAction(Settings settings, TransportService transport
}
@Override
- protected void doExecute(DeleteFilterAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, DeleteFilterAction.Request request, ActionListener listener) {
final String filterId = request.getFilterId();
ClusterState state = clusterService.state();
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java
index ad22f84f6d468..c63f8a4405b89 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java
@@ -14,6 +14,7 @@
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction;
import org.elasticsearch.xpack.core.ml.job.config.Job;
@@ -47,7 +48,8 @@ public TransportDeleteModelSnapshotAction(Settings settings, TransportService tr
}
@Override
- protected void doExecute(DeleteModelSnapshotAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, DeleteModelSnapshotAction.Request request,
+ ActionListener listener) {
// Verify the snapshot exists
jobProvider.modelSnapshots(
request.getJobId(), 0, 1, null, null, null, true, request.getSnapshotId(),
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java
index 7b8128982ee84..63a1efe471a47 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java
@@ -11,6 +11,7 @@
import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.GetBucketsAction;
import org.elasticsearch.xpack.ml.job.JobManager;
@@ -36,7 +37,7 @@ public TransportGetBucketsAction(Settings settings, TransportService transportSe
}
@Override
- protected void doExecute(GetBucketsAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, GetBucketsAction.Request request, ActionListener listener) {
jobManager.getJobOrThrowIfUnknown(request.getJobId());
BucketsQueryBuilder query =
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java
index 5647c72d44bd2..2e30ad80d859a 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java
@@ -12,6 +12,7 @@
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.action.GetCalendarEventsAction;
@@ -43,7 +44,7 @@ public TransportGetCalendarEventsAction(Settings settings, TransportService tran
}
@Override
- protected void doExecute(GetCalendarEventsAction.Request request,
+ protected void doExecute(Task task, GetCalendarEventsAction.Request request,
ActionListener listener) {
ActionListener calendarExistsListener = ActionListener.wrap(
r -> {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java
index 46c252004a3c5..ed837139ade1c 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.GetCalendarsAction;
import org.elasticsearch.xpack.core.ml.action.util.PageParams;
@@ -33,7 +34,7 @@ public TransportGetCalendarsAction(Settings settings, TransportService transport
}
@Override
- protected void doExecute(GetCalendarsAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, GetCalendarsAction.Request request, ActionListener listener) {
final String calendarId = request.getCalendarId();
if (request.getCalendarId() != null && GetCalendarsAction.Request.ALL.equals(request.getCalendarId()) == false) {
getCalendar(calendarId, listener);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java
index b3a2d9bab0c35..0e0481f394ccf 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java
@@ -11,6 +11,7 @@
import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.GetCategoriesAction;
import org.elasticsearch.xpack.ml.job.JobManager;
@@ -35,7 +36,7 @@ public TransportGetCategoriesAction(Settings settings, TransportService transpor
}
@Override
- protected void doExecute(GetCategoriesAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, GetCategoriesAction.Request request, ActionListener listener) {
jobManager.getJobOrThrowIfUnknown(request.getJobId());
Integer from = request.getPageParams() != null ? request.getPageParams().getFrom() : null;
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java
index c8cd7a0d63bb7..83a4c12b819ce 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java
@@ -26,6 +26,7 @@
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.action.GetFiltersAction;
@@ -56,7 +57,7 @@ public TransportGetFiltersAction(Settings settings, TransportService transportSe
}
@Override
- protected void doExecute(GetFiltersAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, GetFiltersAction.Request request, ActionListener listener) {
final String filterId = request.getFilterId();
if (!Strings.isNullOrEmpty(filterId)) {
getFilter(filterId, listener);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java
index 9d45559bc111c..125e31fcf63cf 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java
@@ -11,6 +11,7 @@
import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.GetInfluencersAction;
import org.elasticsearch.xpack.ml.job.JobManager;
@@ -36,7 +37,7 @@ public TransportGetInfluencersAction(Settings settings, TransportService transpo
}
@Override
- protected void doExecute(GetInfluencersAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, GetInfluencersAction.Request request, ActionListener listener) {
jobManager.getJobOrThrowIfUnknown(request.getJobId());
InfluencersQueryBuilder.InfluencersQuery query = new InfluencersQueryBuilder()
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java
index d37cdd90e3aeb..b69db8d48d60f 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction;
import org.elasticsearch.xpack.core.ml.action.util.QueryPage;
@@ -35,7 +36,8 @@ public TransportGetModelSnapshotsAction(Settings settings, TransportService tran
}
@Override
- protected void doExecute(GetModelSnapshotsAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, GetModelSnapshotsAction.Request request,
+ ActionListener listener) {
logger.debug("Get model snapshots for job {} snapshot ID {}. from = {}, size = {}" + " start = '{}', end='{}', sort={} descending={}",
request.getJobId(), request.getSnapshotId(), request.getPageParams().getFrom(), request.getPageParams().getSize(),
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java
index d412129b47a31..c0792a45b29d3 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java
@@ -22,6 +22,7 @@
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.min.Min;
import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.GetOverallBucketsAction;
@@ -74,7 +75,8 @@ public TransportGetOverallBucketsAction(Settings settings, ThreadPool threadPool
}
@Override
- protected void doExecute(GetOverallBucketsAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, GetOverallBucketsAction.Request request,
+ ActionListener listener) {
QueryPage jobsPage = jobManager.expandJobs(request.getJobId(), request.allowNoJobs(), clusterService.state());
if (jobsPage.count() == 0) {
listener.onResponse(new GetOverallBucketsAction.Response());
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java
index 6943cd9a01c5e..b1556ba6e45c7 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java
@@ -11,6 +11,7 @@
import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.GetRecordsAction;
import org.elasticsearch.xpack.ml.job.JobManager;
@@ -36,7 +37,7 @@ public TransportGetRecordsAction(Settings settings, TransportService transportSe
}
@Override
- protected void doExecute(GetRecordsAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, GetRecordsAction.Request request, ActionListener listener) {
jobManager.getJobOrThrowIfUnknown(request.getJobId());
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java
index b8b57a865e177..5a54e51f4ba2f 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java
@@ -13,6 +13,7 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MachineLearningField;
import org.elasticsearch.xpack.core.ml.action.MlInfoAction;
@@ -37,7 +38,7 @@ public TransportMlInfoAction(Settings settings, TransportService transportServic
}
@Override
- protected void doExecute(MlInfoAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, MlInfoAction.Request request, ActionListener listener) {
Map info = new HashMap<>();
info.put("defaults", defaults());
info.put("limits", limits());
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java
index 49cb7dc45c954..c1279248908a2 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java
@@ -19,6 +19,7 @@
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction;
@@ -53,7 +54,7 @@ public TransportPostCalendarEventsAction(Settings settings, TransportService tra
}
@Override
- protected void doExecute(PostCalendarEventsAction.Request request,
+ protected void doExecute(Task task, PostCalendarEventsAction.Request request,
ActionListener listener) {
List events = request.getScheduledEvents();
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java
index 867372600d7a4..cc3a34f20f570 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java
@@ -13,6 +13,7 @@
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ClientHelper;
@@ -51,7 +52,7 @@ public TransportPreviewDatafeedAction(Settings settings, ThreadPool threadPool,
}
@Override
- protected void doExecute(PreviewDatafeedAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, PreviewDatafeedAction.Request request, ActionListener listener) {
MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterService.state());
DatafeedConfig datafeed = mlMetadata.getDatafeed(request.getDatafeedId());
if (datafeed == null) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java
index 82caa9a35a6c5..7611a27cd5a1d 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java
@@ -20,6 +20,7 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.engine.VersionConflictEngineException;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.action.PutCalendarAction;
@@ -46,7 +47,7 @@ public TransportPutCalendarAction(Settings settings, TransportService transportS
}
@Override
- protected void doExecute(PutCalendarAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, PutCalendarAction.Request request, ActionListener listener) {
Calendar calendar = request.getCalendar();
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, calendar.documentId());
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java
index 011606f3c14ed..19bf35aaed617 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java
@@ -21,6 +21,7 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.engine.VersionConflictEngineException;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.action.PutFilterAction;
@@ -46,7 +47,7 @@ public TransportPutFilterAction(Settings settings, TransportService transportSer
}
@Override
- protected void doExecute(PutFilterAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, PutFilterAction.Request request, ActionListener listener) {
MlFilter filter = request.getFilter();
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId());
indexRequest.opType(DocWriteRequest.OpType.CREATE);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java
index fd19c7483bc05..c7c9488c26825 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java
@@ -11,6 +11,7 @@
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.PutCalendarAction;
import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction;
@@ -33,7 +34,7 @@ public TransportUpdateCalendarJobAction(Settings settings, TransportService tran
}
@Override
- protected void doExecute(UpdateCalendarJobAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, UpdateCalendarJobAction.Request request, ActionListener listener) {
Set jobIdsToAdd = Strings.tokenizeByCommaToSet(request.getJobIdsToAddExpression());
Set jobIdsToRemove = Strings.tokenizeByCommaToSet(request.getJobIdsToRemoveExpression());
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java
index 37f550fbb02ea..c8dbf9273829f 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java
@@ -28,6 +28,7 @@
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.engine.VersionConflictEngineException;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.action.PutFilterAction;
@@ -62,7 +63,7 @@ public TransportUpdateFilterAction(Settings settings, TransportService transport
}
@Override
- protected void doExecute(UpdateFilterAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, UpdateFilterAction.Request request, ActionListener listener) {
ActionListener filterListener = ActionListener.wrap(filterWithVersion -> {
updateFilter(filterWithVersion, request, listener);
}, listener::onFailure);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java
index fe5498006d902..8000eaacd4fbe 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java
@@ -20,6 +20,7 @@
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
@@ -50,7 +51,8 @@ public TransportUpdateModelSnapshotAction(Settings settings, TransportService tr
}
@Override
- protected void doExecute(UpdateModelSnapshotAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, UpdateModelSnapshotAction.Request request,
+ ActionListener listener) {
logger.debug("Received request to update model snapshot [{}] for job [{}]", request.getSnapshotId(), request.getJobId());
jobProvider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), modelSnapshot -> {
if (modelSnapshot == null) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java
index 284ae505a01e4..c2e89dc78c28e 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction;
@@ -25,7 +26,7 @@ public TransportValidateDetectorAction(Settings settings, TransportService trans
}
@Override
- protected void doExecute(ValidateDetectorAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, ValidateDetectorAction.Request request, ActionListener listener) {
listener.onResponse(new ValidateDetectorAction.Response(true));
}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java
index dc2a8155c4d94..b644bc1d47067 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction;
@@ -25,7 +26,8 @@ public TransportValidateJobConfigAction(Settings settings, TransportService tran
}
@Override
- protected void doExecute(ValidateJobConfigAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, ValidateJobConfigAction.Request request,
+ ActionListener listener) {
listener.onResponse(new ValidateJobConfigAction.Response(true));
}
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java
index 3ec30552a00b5..c48c33797c41d 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java
@@ -16,6 +16,7 @@
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.monitoring.MonitoredSystem;
@@ -51,7 +52,7 @@ public TransportMonitoringBulkAction(Settings settings, ThreadPool threadPool, C
}
@Override
- protected void doExecute(MonitoringBulkRequest request, ActionListener listener) {
+ protected void doExecute(Task task, MonitoringBulkRequest request, ActionListener listener) {
clusterService.state().blocks().globalBlockedRaiseException(ClusterBlockLevel.WRITE);
// ignore incoming bulk requests when collection is disabled in ES
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java
index 216afa49c1cc0..5f013e8897bde 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java
@@ -17,6 +17,7 @@
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.rollup.RollupField;
import org.elasticsearch.xpack.core.rollup.action.GetRollupCapsAction;
@@ -42,7 +43,7 @@ public TransportGetRollupCapsAction(Settings settings, TransportService transpor
}
@Override
- protected void doExecute(GetRollupCapsAction.Request request, ActionListener listener) {
+ protected void doExecute(Task task, GetRollupCapsAction.Request request, ActionListener listener) {
Map allCaps = getCaps(request.getIndexPattern(), clusterService.state().getMetaData().indices());
listener.onResponse(new GetRollupCapsAction.Response(allCaps));
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java
index 239be32033f13..7be9cc7ae3b20 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java
@@ -99,7 +99,7 @@ public TransportRollupSearchAction(Settings settings, TransportService transport
}
@Override
- protected void doExecute(SearchRequest request, ActionListener listener) {
+ protected void doExecute(Task task, SearchRequest request, ActionListener listener) {
RollupSearchContext rollupSearchContext = separateIndices(request.indices(), clusterService.state().getMetaData().indices());
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java
index 62da4def6726a..2a88f5be00779 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java
@@ -12,6 +12,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction;
import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest;
@@ -31,7 +32,7 @@ public TransportDeleteRoleAction(Settings settings, ActionFilters actionFilters,
}
@Override
- protected void doExecute(DeleteRoleRequest request, ActionListener listener) {
+ protected void doExecute(Task task, DeleteRoleRequest request, ActionListener listener) {
if (ReservedRolesStore.isReserved(request.name())) {
listener.onFailure(new IllegalArgumentException("role [" + request.name() + "] is reserved and cannot be deleted"));
return;
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java
index 87c4ac7d9affa..b930e43e55c8b 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.role.GetRolesAction;
import org.elasticsearch.xpack.core.security.action.role.GetRolesRequest;
@@ -37,7 +38,7 @@ public TransportGetRolesAction(Settings settings, ActionFilters actionFilters,
}
@Override
- protected void doExecute(final GetRolesRequest request, final ActionListener listener) {
+ protected void doExecute(Task task, final GetRolesRequest request, final ActionListener listener) {
final String[] requestedRoles = request.names();
final boolean specificRolesRequested = requestedRoles != null && requestedRoles.length > 0;
final List rolesToSearchFor = new ArrayList<>();
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java
index 7257dc947f5d8..5edd8764c09a4 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.role.PutRoleAction;
import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest;
@@ -29,7 +30,7 @@ public TransportPutRoleAction(Settings settings, ActionFilters actionFilters,
}
@Override
- protected void doExecute(final PutRoleRequest request, final ActionListener listener) {
+ protected void doExecute(Task task, final PutRoleRequest request, final ActionListener listener) {
final String name = request.roleDescriptor().getName();
if (ReservedRolesStore.isReserved(name)) {
listener.onFailure(new IllegalArgumentException("role [" + name + "] is reserved and cannot be modified."));
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java
index d5d30d80a3115..2f8e97661948f 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction;
import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest;
@@ -30,8 +31,7 @@ public TransportDeleteRoleMappingAction(Settings settings, ActionFilters actionF
}
@Override
- protected void doExecute(DeleteRoleMappingRequest request,
- ActionListener listener) {
+ protected void doExecute(Task task, DeleteRoleMappingRequest request, ActionListener listener) {
roleMappingStore.deleteRoleMapping(request, new ActionListener() {
@Override
public void onResponse(Boolean found) {
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java
index 313d4d02e69b2..86a4f57a6e74d 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction;
import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest;
@@ -35,8 +36,7 @@ public TransportGetRoleMappingsAction(Settings settings, ActionFilters actionFil
}
@Override
- protected void doExecute(final GetRoleMappingsRequest request,
- final ActionListener listener) {
+ protected void doExecute(Task task, final GetRoleMappingsRequest request, final ActionListener listener) {
final Set names;
if (request.getNames() == null || request.getNames().length == 0) {
names = null;
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java
index 8e72a7d76e6ef..057e22d49a50c 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction;
import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest;
@@ -30,8 +31,7 @@ public TransportPutRoleMappingAction(Settings settings, ActionFilters actionFilt
}
@Override
- protected void doExecute(final PutRoleMappingRequest request,
- final ActionListener listener) {
+ protected void doExecute(Task task, final PutRoleMappingRequest request, final ActionListener listener) {
roleMappingStore.putRoleMapping(request, ActionListener.wrap(
created -> listener.onResponse(new PutRoleMappingResponse(created)),
listener::onFailure
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java
index 3d0965b96aa9c..d2507d51d0e88 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java
@@ -15,6 +15,7 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction;
@@ -46,8 +47,7 @@ public TransportSamlAuthenticateAction(Settings settings, ThreadPool threadPool,
}
@Override
- protected void doExecute(SamlAuthenticateRequest request,
- ActionListener listener) {
+ protected void doExecute(Task task, SamlAuthenticateRequest request, ActionListener listener) {
final SamlToken saml = new SamlToken(request.getSaml(), request.getValidRequestIds());
logger.trace("Attempting to authenticate SamlToken [{}]", saml);
final ThreadContext threadContext = threadPool.getThreadContext();
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java
index 778364bf5c1a5..00caaf6dacff3 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java
@@ -14,6 +14,7 @@
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionAction;
import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionRequest;
@@ -54,8 +55,7 @@ public TransportSamlInvalidateSessionAction(Settings settings, TransportService
}
@Override
- protected void doExecute(SamlInvalidateSessionRequest request,
- ActionListener listener) {
+ protected void doExecute(Task task, SamlInvalidateSessionRequest request, ActionListener listener) {
List realms = findSamlRealms(this.realms, request.getRealmName(), request.getAssertionConsumerServiceURL());
if (realms.isEmpty()) {
listener.onFailure(SamlUtils.samlException("Cannot find any matching realm for [{}]", request));
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java
index 43873c5bcadf4..63931d119e0f2 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java
@@ -12,6 +12,7 @@
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutAction;
import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutRequest;
@@ -48,8 +49,7 @@ public TransportSamlLogoutAction(Settings settings, TransportService transportSe
}
@Override
- protected void doExecute(SamlLogoutRequest request,
- ActionListener listener) {
+ protected void doExecute(Task task, SamlLogoutRequest request, ActionListener listener) {
invalidateRefreshToken(request.getRefreshToken(), ActionListener.wrap(ignore -> {
try {
final String token = request.getToken();
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java
index 58eb5ccc59ce1..48330bf63cd6c 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java
@@ -11,6 +11,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationAction;
import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationRequest;
@@ -42,9 +43,9 @@ public TransportSamlPrepareAuthenticationAction(Settings settings, TransportServ
}
@Override
- protected void doExecute(SamlPrepareAuthenticationRequest request,
+ protected void doExecute(Task task, SamlPrepareAuthenticationRequest request,
ActionListener listener) {
- List realms = findSamlRealms(this.realms, request.getRealmName(), request.getAssertionConsumerServiceURL() );
+ List realms = findSamlRealms(this.realms, request.getRealmName(), request.getAssertionConsumerServiceURL());
if (realms.isEmpty()) {
listener.onFailure(SamlUtils.samlException("Cannot find any matching realm for [{}]", request));
} else if (realms.size() > 1) {
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java
index 60d3086763a09..358f6aee712df 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java
@@ -11,6 +11,7 @@
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.token.CreateTokenAction;
@@ -46,7 +47,7 @@ public TransportCreateTokenAction(Settings settings, ThreadPool threadPool, Tran
}
@Override
- protected void doExecute(CreateTokenRequest request, ActionListener listener) {
+ protected void doExecute(Task task, CreateTokenRequest request, ActionListener listener) {
Authentication originatingAuthentication = Authentication.getAuthentication(threadPool.getThreadContext());
try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().stashContext()) {
final UsernamePasswordToken authToken = new UsernamePasswordToken(request.getUsername(), request.getPassword());
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java
index 7b280087d617b..ecc97399df862 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction;
import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest;
@@ -32,7 +33,7 @@ public TransportInvalidateTokenAction(Settings settings, TransportService transp
}
@Override
- protected void doExecute(InvalidateTokenRequest request, ActionListener listener) {
+ protected void doExecute(Task task, InvalidateTokenRequest request, ActionListener listener) {
final ActionListener invalidateListener = ActionListener.wrap(created -> listener.onResponse(new InvalidateTokenResponse(created)), listener::onFailure);
if (request.getTokenType() == InvalidateTokenRequest.Type.ACCESS_TOKEN) {
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java
index 601ee944dd82b..c84fd58830725 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java
@@ -10,6 +10,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest;
import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse;
@@ -30,7 +31,7 @@ public TransportRefreshTokenAction(Settings settings, TransportService transport
}
@Override
- protected void doExecute(CreateTokenRequest request, ActionListener listener) {
+ protected void doExecute(Task task, CreateTokenRequest request, ActionListener listener) {
tokenService.refreshToken(request.getRefreshToken(), ActionListener.wrap(tuple -> {
final String tokenStr = tokenService.getUserTokenString(tuple.v1());
final String scope = getResponseScopeValue(request.getScope());
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java
index 6a7321bd766de..57510ce116f7d 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java
@@ -11,6 +11,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.SecurityContext;
import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction;
@@ -35,7 +36,7 @@ public TransportAuthenticateAction(Settings settings, TransportService transport
}
@Override
- protected void doExecute(AuthenticateRequest request, ActionListener listener) {
+ protected void doExecute(Task task, AuthenticateRequest request, ActionListener listener) {
final User runAsUser = securityContext.getUser();
final User authUser = runAsUser == null ?
null : runAsUser.authenticatedUser(); if (authUser == null) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 78b4ae0193655..7a42cd5fdea97 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; @@ -31,7 +32,7 @@ public TransportChangePasswordAction(Settings settings, TransportService transpo } @Override - protected void doExecute(ChangePasswordRequest request, ActionListener listener) { + protected void doExecute(Task task, ChangePasswordRequest request, ActionListener listener) { final String username = request.username(); if (AnonymousUser.isAnonymousUsername(username, settings)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be modified via the API")); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java index dc78f64163e5f..36efdf3bd1737 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; @@ -35,7 +36,7 @@ public TransportDeleteUserAction(Settings settings, ActionFilters actionFilters, } @Override - protected void doExecute(DeleteUserRequest request, final ActionListener listener) { + protected void doExecute(Task task, DeleteUserRequest request, final ActionListener listener) { final String username = request.username(); if (ClientReservedRealm.isReserved(username, settings)) { if (AnonymousUser.isAnonymousUsername(username, settings)) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index 49e8c9d96aba9..7e17cda75f0ab 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.GetUsersAction; import org.elasticsearch.xpack.core.security.action.user.GetUsersRequest; @@ -43,7 +44,7 @@ public TransportGetUsersAction(Settings settings, ActionFilters actionFilters, } @Override - protected void doExecute(final GetUsersRequest request, final ActionListener listener) { + protected void doExecute(Task task, final GetUsersRequest request, final ActionListener listener) { final String[] requestedUsers = request.usernames(); final boolean specificUsersRequested = requestedUsers != null && requestedUsers.length > 0; final List usersToSearchFor = new ArrayList<>(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java index 1f6d0cd16decf..9571b022e0a67 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; @@ -55,7 +56,7 @@ public TransportHasPrivilegesAction(Settings settings, ThreadPool threadPool, Tr } @Override - protected void doExecute(HasPrivilegesRequest request, ActionListener listener) { + protected void doExecute(Task task, HasPrivilegesRequest request, ActionListener listener) { final String username = request.username(); final User user = Authentication.getAuthentication(threadPool.getThreadContext()).getUser(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java index 85411b0e75f89..ebc1612afca1b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; @@ -34,7 +35,7 @@ public TransportPutUserAction(Settings settings, ActionFilters actionFilters, } @Override - protected void doExecute(final PutUserRequest request, final ActionListener listener) { + protected void doExecute(Task task, final PutUserRequest request, final ActionListener listener) { final String username = request.username(); if (ClientReservedRealm.isReserved(username, settings)) { if (AnonymousUser.isAnonymousUsername(username, settings)) { diff 
--git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java index f4e99d364ec0b..cbf505d9c6751 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.SetEnabledAction; @@ -38,7 +39,7 @@ public TransportSetEnabledAction(Settings settings, ThreadPool threadPool, Trans } @Override - protected void doExecute(SetEnabledRequest request, ActionListener listener) { + protected void doExecute(Task task, SetEnabledRequest request, ActionListener listener) { final String username = request.username(); // make sure the user is not disabling themselves if (Authentication.getAuthentication(threadPool.getThreadContext()).getUser().principal().equals(request.username())) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java index fba4afe47911e..572e948b26e80 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; @@ -51,7 +52,7 @@ public void testReservedRole() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteRoleResponse deleteRoleResponse) { responseRef.set(deleteRoleResponse); @@ -94,7 +95,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteRoleResponse deleteRoleResponse) { responseRef.set(deleteRoleResponse); @@ -138,7 +139,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteRoleResponse 
deleteRoleResponse) { responseRef.set(deleteRoleResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java index 27ae467c786db..672a24eb45d39 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.GetRolesRequest; @@ -64,7 +65,7 @@ public void testReservedRoles() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetRolesResponse response) { responseRef.set(response); @@ -105,7 +106,7 @@ public void testStoreRoles() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetRolesResponse response) { responseRef.set(response); @@ -173,7 +174,7 @@ public void testGetAllOrMix() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetRolesResponse response) { responseRef.set(response); @@ -221,7 +222,7 @@ public void testException() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetRolesResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java index 8392f92e0c31f..eb606314788c9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; @@ -51,7 +52,7 @@ public void testReservedRole() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new 
AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutRoleResponse response) { responseRef.set(response); @@ -93,7 +94,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutRoleResponse response) { responseRef.set(response); @@ -135,7 +136,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutRoleResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java index ea6713bb85fe9..d10020cd78b3f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; @@ -67,7 +68,7 @@ public void testGetSingleRole() throws Exception { final ExpressionRoleMapping mapping = mock(ExpressionRoleMapping.class); result = Collections.singletonList(mapping); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); assertThat(future.get(), notNullValue()); assertThat(future.get().mappings(), arrayContaining(mapping)); assertThat(namesRef.get(), containsInAnyOrder("everyone")); @@ -83,7 +84,7 @@ public void testGetMultipleNamedRoles() throws Exception { final ExpressionRoleMapping mapping3 = mock(ExpressionRoleMapping.class); result = Arrays.asList(mapping1, mapping2, mapping3); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final GetRoleMappingsResponse response = future.get(); assertThat(response, notNullValue()); @@ -101,7 +102,7 @@ public void testGetAllRoles() throws Exception { final ExpressionRoleMapping mapping3 = mock(ExpressionRoleMapping.class); result = Arrays.asList(mapping1, mapping2, mapping3); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final GetRoleMappingsResponse response = future.get(); assertThat(response, notNullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 
b105c0d5d0ea5..68a957c9c3c14 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; @@ -84,7 +85,7 @@ private PutRoleMappingResponse put(String name, FieldExpression expression, Stri request.setMetadata(metadata); request.setEnabled(true); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); return future.get(); } } \ No newline at end of file diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index b9232903f52ff..bec6038b65580 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; @@ -240,7 +241,7 @@ public void testInvalidateCorrectTokensFromLogoutRequest() throws Exception { request.setRealmName(samlRealm.name()); request.setQueryString("SAMLRequest=foo"); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final SamlInvalidateSessionResponse response = future.get(); assertThat(response, notNullValue()); assertThat(response.getCount(), equalTo(2)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 6d177d89021ab..1185fa29986b0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -228,7 +229,7 @@ public void testLogoutInvalidatesToken() throws Exception { final SamlLogoutRequest request = new SamlLogoutRequest(); request.setToken(tokenString); final 
PlainActionFuture listener = new PlainActionFuture<>(); - action.doExecute(request, listener); + action.doExecute(mock(Task.class), request, listener); final SamlLogoutResponse response = listener.get(); assertThat(response, notNullValue()); assertThat(response.getRedirectUrl(), notNullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java index 66e2192eee5dd..96b8b4fe25764 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -43,7 +44,7 @@ public void testInternalUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new AuthenticateRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new AuthenticateRequest(), new ActionListener() { @Override public void onResponse(AuthenticateResponse authenticateResponse) { responseRef.set(authenticateResponse); @@ -69,7 +70,7 @@ public void testNullUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new AuthenticateRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new AuthenticateRequest(), new ActionListener() { @Override public void onResponse(AuthenticateResponse authenticateResponse) { responseRef.set(authenticateResponse); @@ -97,7 +98,7 @@ public void testValidUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new AuthenticateRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new AuthenticateRequest(), new ActionListener() { @Override public void onResponse(AuthenticateResponse authenticateResponse) { responseRef.set(authenticateResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java index 4aa68c24c8d71..8808ab92a41f9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.transport.TransportService; @@ -59,7 +60,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = 
new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); @@ -90,7 +91,7 @@ public void testInternalUsers() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); @@ -128,7 +129,7 @@ public void testValidUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); @@ -169,7 +170,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java index 9c61d0cde520c..ed7f9cff6e25e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; @@ -53,7 +54,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -82,7 +83,7 @@ public void testInternalUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -112,7 +113,7 @@ public void testReservedUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), 
request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -152,7 +153,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -192,7 +193,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index 070fe4e64317b..b11a57c2d678a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -99,7 +100,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -134,7 +135,7 @@ public void testInternalUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -178,7 +179,7 @@ public void testReservedUsersOnly() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -225,7 +226,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -271,7 +272,7 @@ public void testGetStoreOnlyUsers() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - 
action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -319,7 +320,7 @@ public void testException() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java index cb0f643fd89d7..9f4d7c957b46c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.mock.orig.Mockito; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -92,7 +93,7 @@ public void testNamedIndexPrivilegesMatchApplicableActions() throws Exception { .privileges(DeleteAction.NAME, IndexAction.NAME) .build()); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); @@ -128,7 +129,7 @@ public void testMatchSubsetOfPrivileges() throws Exception { .privileges("delete", "index", "manage") .build()); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); @@ -230,7 +231,7 @@ public void testWildcardHandling() throws Exception { .build() ); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); @@ -298,7 +299,7 @@ private HasPrivilegesResponse hasPrivileges(RoleDescriptor.IndicesPrivileges ind request.clusterPrivileges(clusterPrivileges); request.indexPrivileges(indicesPrivileges); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); return response; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java index 5f9a3f5243160..86a70bdf7e08e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.threadpool.ThreadPool; @@ -65,7 +66,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -94,7 +95,7 @@ public void testSystemUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -135,7 +136,7 @@ public void testReservedUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -179,7 +180,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -220,7 +221,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java index a8076c21cdb49..1c2eb8a9a1503 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -72,7 +73,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - 
action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -110,7 +111,7 @@ public void testInternalUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -160,7 +161,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -212,7 +213,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -252,7 +253,7 @@ public void testUserModifyingThemselves() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index b6a53ae95f098..7a216f3a2bf9f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.session.Configuration; @@ -33,7 +34,7 @@ public TransportSqlClearCursorAction(Settings settings, TransportService transpo } @Override - protected void doExecute(SqlClearCursorRequest request, ActionListener listener) { + protected void doExecute(Task task, SqlClearCursorRequest request, ActionListener listener) { sqlLicenseChecker.checkIfSqlAllowed(request.mode()); operation(planExecutor, request, listener); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index bc96b3de97307..7993f00d71aee 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.sql.execution.PlanExecutor;
 import org.elasticsearch.xpack.sql.proto.ColumnInfo;
@@ -43,7 +44,7 @@ public TransportSqlQueryAction(Settings settings, TransportService transportServ
     }
 
     @Override
-    protected void doExecute(SqlQueryRequest request, ActionListener listener) {
+    protected void doExecute(Task task, SqlQueryRequest request, ActionListener listener) {
         sqlLicenseChecker.checkIfSqlAllowed(request.mode());
         operation(planExecutor, request, listener);
     }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java
index 61772ce6bb1e9..4ef7c14ab01f3 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.sql.execution.PlanExecutor;
 import org.elasticsearch.xpack.sql.session.Configuration;
@@ -33,7 +34,7 @@ public TransportSqlTranslateAction(Settings settings, TransportService transport
     }
 
     @Override
-    protected void doExecute(SqlTranslateRequest request, ActionListener listener) {
+    protected void doExecute(Task task, SqlTranslateRequest request, ActionListener listener) {
         sqlLicenseChecker.checkIfSqlAllowed(request.mode());
 
         Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(),
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java
index 3d493299d7036..56ce9f6d4a280 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java
@@ -35,11 +35,13 @@ protected String executor() {
     }
 
     @Override
-    protected void doExecute(Task task, final Request request, ActionListener listener) {
+    protected final void doExecute(Task task, final Request request, ActionListener listener) {
         if (licenseState.isWatcherAllowed()) {
-            super.doExecute(task, request, listener);
+            doExecute(request, listener);
         } else {
             listener.onFailure(LicenseUtils.newComplianceException(XPackField.WATCHER));
         }
     }
+
+    protected abstract void doExecute(Request request, ActionListener listener);
 }
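The WatcherTransportAction hunk directly above is the one place in this patch where the new parameter changes a class's design rather than just its signature: the task-aware doExecute becomes a final license gate, and concrete watcher actions keep implementing a task-free hook. Below is a minimal sketch of that template-method shape, assuming a hypothetical LicenseGate interface in place of XPackLicenseState and a plain exception in place of LicenseUtils.newComplianceException; it illustrates the pattern, not the X-Pack code itself.

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.tasks.Task;

    // Stand-in for XPackLicenseState#isWatcherAllowed(); illustrative only.
    interface LicenseGate {
        boolean isAllowed();
    }

    abstract class GatedTransportAction<Request, Response> {
        private final LicenseGate licenseState;

        GatedTransportAction(LicenseGate licenseState) {
            this.licenseState = licenseState;
        }

        // Final template method: every subclass goes through the same license check, and the
        // Task parameter stops here because the concrete watcher actions do not use it.
        protected final void doExecute(Task task, Request request, ActionListener<Response> listener) {
            if (licenseState.isAllowed()) {
                doExecute(request, listener);
            } else {
                // The real code fails with LicenseUtils.newComplianceException(XPackField.WATCHER).
                listener.onFailure(new IllegalStateException("watcher is not allowed by the current license"));
            }
        }

        // Hook implemented by concrete actions, unchanged from their pre-Task signature.
        protected abstract void doExecute(Request request, ActionListener<Response> listener);
    }

Marking the task-aware method final keeps subclasses from accidentally overriding it and bypassing the license check.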
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java
index 3f430e2ec2479..d7ff25b623f50 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchAction;
 import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest;
@@ -42,7 +43,7 @@ public TransportDeleteWatchAction(Settings settings, TransportService transportS
     }
 
     @Override
-    protected void doExecute(DeleteWatchRequest request, ActionListener listener) {
+    protected void doExecute(Task task, DeleteWatchRequest request, ActionListener listener) {
         DeleteRequest deleteRequest = new DeleteRequest(Watch.INDEX, Watch.DOC_TYPE, request.getId());
         deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
         executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, deleteRequest,
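The test hunks earlier in this patch all make the matching one-line adjustment on the caller side: direct doExecute calls now pass a task, and a Mockito mock is enough because none of these actions read it. A short sketch of that pattern follows, reusing the hypothetical TransportFooAction types from the earlier note; PlainActionFuture and Task are real Elasticsearch classes, everything else is illustrative.

    import static org.mockito.Mockito.mock;

    import org.elasticsearch.action.support.PlainActionFuture;
    import org.elasticsearch.tasks.Task;

    class TransportFooActionTests {

        public void testDoExecuteAcceptsAMockedTask() throws Exception {
            // Back the hypothetical action with a store stub that reports "created".
            TransportFooAction action = new TransportFooAction((req, l) -> l.onResponse(true));

            // PlainActionFuture doubles as an ActionListener that the test can block on.
            PlainActionFuture<FooResponse> future = new PlainActionFuture<>();
            action.doExecute(mock(Task.class), new FooRequest(), future);

            FooResponse response = future.get();
            assert response.created;
        }
    }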
From eb81a305ae36beed75dff7e61b05ec10c69f339b Mon Sep 17 00:00:00 2001
From: Lisa Cawley
Date: Fri, 22 Jun 2018 15:39:34 -0700
Subject: [PATCH 30/34] [DOCS] Move monitoring to docs folder (#31477)

---
 docs/reference/index.asciidoc                                   | 2 +-
 .../docs/en => docs/reference}/monitoring/collectors.asciidoc   | 1 +
 .../docs/en => docs/reference}/monitoring/exporters.asciidoc    | 1 +
 .../docs/en => docs/reference}/monitoring/http-export.asciidoc  | 1 +
 {x-pack/docs/en => docs/reference}/monitoring/index.asciidoc    | 1 +
 .../docs/en => docs/reference}/monitoring/local-export.asciidoc | 1 +
 .../docs/en => docs/reference}/monitoring/pause-export.asciidoc | 1 +
 x-pack/docs/en/monitoring/configuring-monitoring.asciidoc       | 1 +
 x-pack/docs/en/monitoring/indices.asciidoc                      | 1 +
 9 files changed, 9 insertions(+), 1 deletion(-)
 rename {x-pack/docs/en => docs/reference}/monitoring/collectors.asciidoc (99%)
 rename {x-pack/docs/en => docs/reference}/monitoring/exporters.asciidoc (99%)
 rename {x-pack/docs/en => docs/reference}/monitoring/http-export.asciidoc (99%)
 rename {x-pack/docs/en => docs/reference}/monitoring/index.asciidoc (99%)
 rename {x-pack/docs/en => docs/reference}/monitoring/local-export.asciidoc (99%)
 rename {x-pack/docs/en => docs/reference}/monitoring/pause-export.asciidoc (98%)

diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc
index adbdc01db1e72..ef8e345303137 100644
--- a/docs/reference/index.asciidoc
+++ b/docs/reference/index.asciidoc
@@ -59,7 +59,7 @@ include::ingest.asciidoc[]
 
 include::{xes-repo-dir}/sql/index.asciidoc[]
 
-include::{xes-repo-dir}/monitoring/index.asciidoc[]
+include::monitoring/index.asciidoc[]
 
 include::{xes-repo-dir}/rollup/index.asciidoc[]
 
diff --git a/x-pack/docs/en/monitoring/collectors.asciidoc b/docs/reference/monitoring/collectors.asciidoc
similarity index 99%
rename from x-pack/docs/en/monitoring/collectors.asciidoc
rename to docs/reference/monitoring/collectors.asciidoc
index 336f204b5eefb..bd48d1287006a 100644
--- a/x-pack/docs/en/monitoring/collectors.asciidoc
+++ b/docs/reference/monitoring/collectors.asciidoc
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="basic"]
 [[es-monitoring-collectors]]
 == Collectors
 
diff --git a/x-pack/docs/en/monitoring/exporters.asciidoc b/docs/reference/monitoring/exporters.asciidoc
similarity index 99%
rename from x-pack/docs/en/monitoring/exporters.asciidoc
rename to docs/reference/monitoring/exporters.asciidoc
index e7727f1e97af0..2a7729eee9425 100644
--- a/x-pack/docs/en/monitoring/exporters.asciidoc
+++ b/docs/reference/monitoring/exporters.asciidoc
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="basic"]
 [[es-monitoring-exporters]]
 == Exporters
 
diff --git a/x-pack/docs/en/monitoring/http-export.asciidoc b/docs/reference/monitoring/http-export.asciidoc
similarity index 99%
rename from x-pack/docs/en/monitoring/http-export.asciidoc
rename to docs/reference/monitoring/http-export.asciidoc
index db1dbe2a29c5b..4dfe1a0c537ea 100644
--- a/x-pack/docs/en/monitoring/http-export.asciidoc
+++ b/docs/reference/monitoring/http-export.asciidoc
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="basic"]
 [[http-exporter]]
 === HTTP Exporters
 
diff --git a/x-pack/docs/en/monitoring/index.asciidoc b/docs/reference/monitoring/index.asciidoc
similarity index 99%
rename from x-pack/docs/en/monitoring/index.asciidoc
rename to docs/reference/monitoring/index.asciidoc
index 6b8ecc5038ea0..d6a55f44585dd 100644
--- a/x-pack/docs/en/monitoring/index.asciidoc
+++ b/docs/reference/monitoring/index.asciidoc
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="basic"]
 [[es-monitoring]]
 = Monitoring {es}
 
diff --git a/x-pack/docs/en/monitoring/local-export.asciidoc b/docs/reference/monitoring/local-export.asciidoc
similarity index 99%
rename from x-pack/docs/en/monitoring/local-export.asciidoc
rename to docs/reference/monitoring/local-export.asciidoc
index 12d0ab5ea9f81..2bc757f07ecc8 100644
--- a/x-pack/docs/en/monitoring/local-export.asciidoc
+++ b/docs/reference/monitoring/local-export.asciidoc
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="basic"]
 [[local-exporter]]
 === Local Exporters
 
diff --git a/x-pack/docs/en/monitoring/pause-export.asciidoc b/docs/reference/monitoring/pause-export.asciidoc
similarity index 98%
rename from x-pack/docs/en/monitoring/pause-export.asciidoc
rename to docs/reference/monitoring/pause-export.asciidoc
index d26799c6892c3..128e72a463c2d 100644
--- a/x-pack/docs/en/monitoring/pause-export.asciidoc
+++ b/docs/reference/monitoring/pause-export.asciidoc
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="basic"]
 [[pause-export]]
 == Pausing Data Collection
 
diff --git a/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc b/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc
index 1712c88380b7a..e705100e05e67 100644
--- a/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc
+++ b/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="gold"]
 [[configuring-monitoring]]
 == Configuring Monitoring in {es}
 ++++
diff --git a/x-pack/docs/en/monitoring/indices.asciidoc b/x-pack/docs/en/monitoring/indices.asciidoc
index efa9836daa2e9..a27d91d423e8d 100644
--- a/x-pack/docs/en/monitoring/indices.asciidoc
+++ b/x-pack/docs/en/monitoring/indices.asciidoc
@@ -1,4 +1,5 @@
 [role="xpack"]
+[testenv="basic"]
 [[config-monitoring-indices]]
 === Configuring Indices for Monitoring
 
From 638b9fd88cdbe717d2e13bf4dbbda9ce610bc198 Mon Sep 17 00:00:00 2001
From: Lisa Cawley
Date: Fri, 22 Jun 2018 15:40:25 -0700
Subject: [PATCH 31/34] [DOCS] Move sql to docs (#31474)

---
 docs/build.gradle                             | 82 +++++++++++++++++++
 docs/reference/index.asciidoc                 |  2 +-
 .../reference}/sql/appendix/index.asciidoc    |  0
 .../sql/appendix/syntax-reserved.asciidoc     |  2 +
 .../reference}/sql/concepts.asciidoc          |  2 +
 .../reference}/sql/endpoints/cli.asciidoc     |  2 +
 .../reference}/sql/endpoints/index.asciidoc   |  0
.../reference}/sql/endpoints/jdbc.asciidoc | 2 + .../reference}/sql/endpoints/rest.asciidoc | 2 + .../sql/endpoints/translate.asciidoc | 2 + .../reference}/sql/functions/index.asciidoc | 2 + .../reference}/sql/getting-started.asciidoc | 2 + .../en => docs/reference}/sql/index.asciidoc | 1 + .../sql/language/data-types.asciidoc | 2 + .../reference}/sql/language/index.asciidoc | 2 + .../language/syntax/describe-table.asciidoc | 2 + .../sql/language/syntax/index.asciidoc | 2 + .../sql/language/syntax/select.asciidoc | 2 + .../sql/language/syntax/show-columns.asciidoc | 2 + .../language/syntax/show-functions.asciidoc | 2 + .../sql/language/syntax/show-tables.asciidoc | 2 + .../reference}/sql/overview.asciidoc | 2 + .../reference}/sql/security.asciidoc | 2 + 23 files changed, 120 insertions(+), 1 deletion(-) rename {x-pack/docs/en => docs/reference}/sql/appendix/index.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/sql/appendix/syntax-reserved.asciidoc (98%) rename {x-pack/docs/en => docs/reference}/sql/concepts.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/sql/endpoints/cli.asciidoc (97%) rename {x-pack/docs/en => docs/reference}/sql/endpoints/index.asciidoc (100%) rename {x-pack/docs/en => docs/reference}/sql/endpoints/jdbc.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/sql/endpoints/rest.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/sql/endpoints/translate.asciidoc (97%) rename {x-pack/docs/en => docs/reference}/sql/functions/index.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/sql/getting-started.asciidoc (98%) rename {x-pack/docs/en => docs/reference}/sql/index.asciidoc (98%) rename {x-pack/docs/en => docs/reference}/sql/language/data-types.asciidoc (98%) rename {x-pack/docs/en => docs/reference}/sql/language/index.asciidoc (87%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/describe-table.asciidoc (87%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/index.asciidoc (94%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/select.asciidoc (99%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/show-columns.asciidoc (85%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/show-functions.asciidoc (89%) rename {x-pack/docs/en => docs/reference}/sql/language/syntax/show-tables.asciidoc (89%) rename {x-pack/docs/en => docs/reference}/sql/overview.asciidoc (97%) rename {x-pack/docs/en => docs/reference}/sql/security.asciidoc (98%) diff --git a/docs/build.gradle b/docs/build.gradle index f1d1324192b16..b04016c946eed 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -522,3 +522,85 @@ for (int i = 0; i < 5; i++) { {"index":{}} {"ip": "12.0.0.$i"}""" } +// Used by SQL because it looks SQL-ish +buildRestTests.setups['library'] = ''' + - do: + indices.create: + index: library + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + mappings: + book: + properties: + name: + type: text + fields: + keyword: + type: keyword + author: + type: text + fields: + keyword: + type: keyword + release_date: + type: date + page_count: + type: short + - do: + bulk: + index: library + type: book + refresh: true + body: | + {"index":{"_id": "Leviathan Wakes"}} + {"name": "Leviathan Wakes", "author": "James S.A. 
Corey", "release_date": "2011-06-02", "page_count": 561} + {"index":{"_id": "Hyperion"}} + {"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} + {"index":{"_id": "Dune"}} + {"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} + {"index":{"_id": "Dune Messiah"}} + {"name": "Dune Messiah", "author": "Frank Herbert", "release_date": "1969-10-15", "page_count": 331} + {"index":{"_id": "Children of Dune"}} + {"name": "Children of Dune", "author": "Frank Herbert", "release_date": "1976-04-21", "page_count": 408} + {"index":{"_id": "God Emperor of Dune"}} + {"name": "God Emperor of Dune", "author": "Frank Herbert", "release_date": "1981-05-28", "page_count": 454} + {"index":{"_id": "Consider Phlebas"}} + {"name": "Consider Phlebas", "author": "Iain M. Banks", "release_date": "1987-04-23", "page_count": 471} + {"index":{"_id": "Pandora's Star"}} + {"name": "Pandora's Star", "author": "Peter F. Hamilton", "release_date": "2004-03-02", "page_count": 768} + {"index":{"_id": "Revelation Space"}} + {"name": "Revelation Space", "author": "Alastair Reynolds", "release_date": "2000-03-15", "page_count": 585} + {"index":{"_id": "A Fire Upon the Deep"}} + {"name": "A Fire Upon the Deep", "author": "Vernor Vinge", "release_date": "1992-06-01", "page_count": 613} + {"index":{"_id": "Ender's Game"}} + {"name": "Ender's Game", "author": "Orson Scott Card", "release_date": "1985-06-01", "page_count": 324} + {"index":{"_id": "1984"}} + {"name": "1984", "author": "George Orwell", "release_date": "1985-06-01", "page_count": 328} + {"index":{"_id": "Fahrenheit 451"}} + {"name": "Fahrenheit 451", "author": "Ray Bradbury", "release_date": "1953-10-15", "page_count": 227} + {"index":{"_id": "Brave New World"}} + {"name": "Brave New World", "author": "Aldous Huxley", "release_date": "1932-06-01", "page_count": 268} + {"index":{"_id": "Foundation"}} + {"name": "Foundation", "author": "Isaac Asimov", "release_date": "1951-06-01", "page_count": 224} + {"index":{"_id": "The Giver"}} + {"name": "The Giver", "author": "Lois Lowry", "release_date": "1993-04-26", "page_count": 208} + {"index":{"_id": "Slaughterhouse-Five"}} + {"name": "Slaughterhouse-Five", "author": "Kurt Vonnegut", "release_date": "1969-06-01", "page_count": 275} + {"index":{"_id": "The Hitchhiker's Guide to the Galaxy"}} + {"name": "The Hitchhiker's Guide to the Galaxy", "author": "Douglas Adams", "release_date": "1979-10-12", "page_count": 180} + {"index":{"_id": "Snow Crash"}} + {"name": "Snow Crash", "author": "Neal Stephenson", "release_date": "1992-06-01", "page_count": 470} + {"index":{"_id": "Neuromancer"}} + {"name": "Neuromancer", "author": "William Gibson", "release_date": "1984-07-01", "page_count": 271} + {"index":{"_id": "The Handmaid's Tale"}} + {"name": "The Handmaid's Tale", "author": "Margaret Atwood", "release_date": "1985-06-01", "page_count": 311} + {"index":{"_id": "Starship Troopers"}} + {"name": "Starship Troopers", "author": "Robert A. Heinlein", "release_date": "1959-12-01", "page_count": 335} + {"index":{"_id": "The Left Hand of Darkness"}} + {"name": "The Left Hand of Darkness", "author": "Ursula K. Le Guin", "release_date": "1969-06-01", "page_count": 304} + {"index":{"_id": "The Moon is a Harsh Mistress"}} + {"name": "The Moon is a Harsh Mistress", "author": "Robert A. 
Heinlein", "release_date": "1966-04-01", "page_count": 288} + +''' \ No newline at end of file diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index ef8e345303137..7d51e4aa51264 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -57,7 +57,7 @@ include::index-modules.asciidoc[] include::ingest.asciidoc[] -include::{xes-repo-dir}/sql/index.asciidoc[] +include::sql/index.asciidoc[] include::monitoring/index.asciidoc[] diff --git a/x-pack/docs/en/sql/appendix/index.asciidoc b/docs/reference/sql/appendix/index.asciidoc similarity index 100% rename from x-pack/docs/en/sql/appendix/index.asciidoc rename to docs/reference/sql/appendix/index.asciidoc diff --git a/x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc b/docs/reference/sql/appendix/syntax-reserved.asciidoc similarity index 98% rename from x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc rename to docs/reference/sql/appendix/syntax-reserved.asciidoc index bbdefcbcb54aa..7a502d6eea939 100644 --- a/x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc +++ b/docs/reference/sql/appendix/syntax-reserved.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [appendix] [[sql-syntax-reserved]] = Reserved Keywords diff --git a/x-pack/docs/en/sql/concepts.asciidoc b/docs/reference/sql/concepts.asciidoc similarity index 99% rename from x-pack/docs/en/sql/concepts.asciidoc rename to docs/reference/sql/concepts.asciidoc index f5eab6f37baf8..1dc23e391fab1 100644 --- a/x-pack/docs/en/sql/concepts.asciidoc +++ b/docs/reference/sql/concepts.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-concepts]] == Conventions and Terminology diff --git a/x-pack/docs/en/sql/endpoints/cli.asciidoc b/docs/reference/sql/endpoints/cli.asciidoc similarity index 97% rename from x-pack/docs/en/sql/endpoints/cli.asciidoc rename to docs/reference/sql/endpoints/cli.asciidoc index e04fd96ab7198..0908c2344bb15 100644 --- a/x-pack/docs/en/sql/endpoints/cli.asciidoc +++ b/docs/reference/sql/endpoints/cli.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-cli]] == SQL CLI diff --git a/x-pack/docs/en/sql/endpoints/index.asciidoc b/docs/reference/sql/endpoints/index.asciidoc similarity index 100% rename from x-pack/docs/en/sql/endpoints/index.asciidoc rename to docs/reference/sql/endpoints/index.asciidoc diff --git a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc b/docs/reference/sql/endpoints/jdbc.asciidoc similarity index 99% rename from x-pack/docs/en/sql/endpoints/jdbc.asciidoc rename to docs/reference/sql/endpoints/jdbc.asciidoc index 84182f8b4a521..6a8793f7e24e2 100644 --- a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc +++ b/docs/reference/sql/endpoints/jdbc.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="platinum"] [[sql-jdbc]] == SQL JDBC diff --git a/x-pack/docs/en/sql/endpoints/rest.asciidoc b/docs/reference/sql/endpoints/rest.asciidoc similarity index 99% rename from x-pack/docs/en/sql/endpoints/rest.asciidoc rename to docs/reference/sql/endpoints/rest.asciidoc index fa5093f8de528..f33189303e682 100644 --- a/x-pack/docs/en/sql/endpoints/rest.asciidoc +++ b/docs/reference/sql/endpoints/rest.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-rest]] == SQL REST API diff --git a/x-pack/docs/en/sql/endpoints/translate.asciidoc b/docs/reference/sql/endpoints/translate.asciidoc similarity index 97% rename from x-pack/docs/en/sql/endpoints/translate.asciidoc rename to docs/reference/sql/endpoints/translate.asciidoc index be6a77a3caa44..db450b5f914c8 100644 --- 
a/x-pack/docs/en/sql/endpoints/translate.asciidoc +++ b/docs/reference/sql/endpoints/translate.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-translate]] == SQL Translate API diff --git a/x-pack/docs/en/sql/functions/index.asciidoc b/docs/reference/sql/functions/index.asciidoc similarity index 99% rename from x-pack/docs/en/sql/functions/index.asciidoc rename to docs/reference/sql/functions/index.asciidoc index dd68370dde3e7..93d201a182828 100644 --- a/x-pack/docs/en/sql/functions/index.asciidoc +++ b/docs/reference/sql/functions/index.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-functions]] == Functions and Operators diff --git a/x-pack/docs/en/sql/getting-started.asciidoc b/docs/reference/sql/getting-started.asciidoc similarity index 98% rename from x-pack/docs/en/sql/getting-started.asciidoc rename to docs/reference/sql/getting-started.asciidoc index 24f01910551bb..7d1bd33e8a035 100644 --- a/x-pack/docs/en/sql/getting-started.asciidoc +++ b/docs/reference/sql/getting-started.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-getting-started]] == Getting Started with SQL diff --git a/x-pack/docs/en/sql/index.asciidoc b/docs/reference/sql/index.asciidoc similarity index 98% rename from x-pack/docs/en/sql/index.asciidoc rename to docs/reference/sql/index.asciidoc index f96b83db08ad6..33b9da9fab93d 100644 --- a/x-pack/docs/en/sql/index.asciidoc +++ b/docs/reference/sql/index.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[xpack-sql]] = SQL Access diff --git a/x-pack/docs/en/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc similarity index 98% rename from x-pack/docs/en/sql/language/data-types.asciidoc rename to docs/reference/sql/language/data-types.asciidoc index 7e5f045aa6ce9..7f98add97248b 100644 --- a/x-pack/docs/en/sql/language/data-types.asciidoc +++ b/docs/reference/sql/language/data-types.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-data-types]] == Data Types diff --git a/x-pack/docs/en/sql/language/index.asciidoc b/docs/reference/sql/language/index.asciidoc similarity index 87% rename from x-pack/docs/en/sql/language/index.asciidoc rename to docs/reference/sql/language/index.asciidoc index fdf6f3e7950ca..6558e9ad92bf8 100644 --- a/x-pack/docs/en/sql/language/index.asciidoc +++ b/docs/reference/sql/language/index.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-spec]] == SQL Language diff --git a/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc b/docs/reference/sql/language/syntax/describe-table.asciidoc similarity index 87% rename from x-pack/docs/en/sql/language/syntax/describe-table.asciidoc rename to docs/reference/sql/language/syntax/describe-table.asciidoc index 114def470b181..dd2d27a5781d2 100644 --- a/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc +++ b/docs/reference/sql/language/syntax/describe-table.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-describe-table]] === DESCRIBE TABLE diff --git a/x-pack/docs/en/sql/language/syntax/index.asciidoc b/docs/reference/sql/language/syntax/index.asciidoc similarity index 94% rename from x-pack/docs/en/sql/language/syntax/index.asciidoc rename to docs/reference/sql/language/syntax/index.asciidoc index e0e970edae14b..4af8f19d7034b 100644 --- a/x-pack/docs/en/sql/language/syntax/index.asciidoc +++ b/docs/reference/sql/language/syntax/index.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-commands]] == SQL Commands diff --git 
a/x-pack/docs/en/sql/language/syntax/select.asciidoc b/docs/reference/sql/language/syntax/select.asciidoc similarity index 99% rename from x-pack/docs/en/sql/language/syntax/select.asciidoc rename to docs/reference/sql/language/syntax/select.asciidoc index 35c2bf0737db9..4a7c0534b68a3 100644 --- a/x-pack/docs/en/sql/language/syntax/select.asciidoc +++ b/docs/reference/sql/language/syntax/select.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-select]] === SELECT diff --git a/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc b/docs/reference/sql/language/syntax/show-columns.asciidoc similarity index 85% rename from x-pack/docs/en/sql/language/syntax/show-columns.asciidoc rename to docs/reference/sql/language/syntax/show-columns.asciidoc index 2e7c8f7bfca69..a52c744f17a97 100644 --- a/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc +++ b/docs/reference/sql/language/syntax/show-columns.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-show-columns]] === SHOW COLUMNS diff --git a/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc b/docs/reference/sql/language/syntax/show-functions.asciidoc similarity index 89% rename from x-pack/docs/en/sql/language/syntax/show-functions.asciidoc rename to docs/reference/sql/language/syntax/show-functions.asciidoc index 197b9e8cb3b79..964cdf39081c6 100644 --- a/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc +++ b/docs/reference/sql/language/syntax/show-functions.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-show-functions]] === SHOW FUNCTIONS diff --git a/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc b/docs/reference/sql/language/syntax/show-tables.asciidoc similarity index 89% rename from x-pack/docs/en/sql/language/syntax/show-tables.asciidoc rename to docs/reference/sql/language/syntax/show-tables.asciidoc index 9266b6d58058b..7772c39c6fc21 100644 --- a/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc +++ b/docs/reference/sql/language/syntax/show-tables.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-show-tables]] === SHOW TABLES diff --git a/x-pack/docs/en/sql/overview.asciidoc b/docs/reference/sql/overview.asciidoc similarity index 97% rename from x-pack/docs/en/sql/overview.asciidoc rename to docs/reference/sql/overview.asciidoc index 36eff69f6263d..a72f5ca61feb5 100644 --- a/x-pack/docs/en/sql/overview.asciidoc +++ b/docs/reference/sql/overview.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-overview]] == Overview diff --git a/x-pack/docs/en/sql/security.asciidoc b/docs/reference/sql/security.asciidoc similarity index 98% rename from x-pack/docs/en/sql/security.asciidoc rename to docs/reference/sql/security.asciidoc index bba73a2a4de6d..64f554f023195 100644 --- a/x-pack/docs/en/sql/security.asciidoc +++ b/docs/reference/sql/security.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-security]] == Security From 51151027cdee34d668802b90dba263b2f4bc8695 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 22 Jun 2018 17:42:58 -0400 Subject: [PATCH 32/34] TEST: Add bwc recovery tests with synced-flush index Although the master branch is not affected by #31482, it's helpful to have BWC tests that verify peer recovery with a synced-flush index. This commit adds the bwc tests from #31506 to the master branch.
Relates #31482 Relates #31506 --- .../upgrades/FullClusterRestartIT.java | 22 +++++++++++-- .../elasticsearch/upgrades/RecoveryIT.java | 32 +++++++++++++++++++ 2 files changed, 52 insertions(+), 2 deletions(-) diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 57c6ad7ff861f..081a1918674d0 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -24,7 +24,9 @@ import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Booleans; @@ -701,8 +703,24 @@ public void testRecovery() throws Exception { // make sure all recoveries are done ensureGreen(index); - // Explicitly flush so we're sure to have a bunch of documents in the Lucene index - client().performRequest("POST", "/_flush"); + // Recovering a synced-flush index from 5.x to 6.x might be subtle as a 5.x index commit does not have all 6.x commit tags. + if (randomBoolean()) { + // We have to spin synced-flush requests here because we fire the global checkpoint sync for the last write operation. + // A synced-flush request considers the global checkpoint sync as an going operation because it acquires a shard permit. + assertBusy(() -> { + try { + Response resp = client().performRequest(new Request("POST", index + "/_flush/synced")); + Map result = ObjectPath.createFromResponse(resp).evaluate("_shards"); + assertThat(result.get("successful"), equalTo(result.get("total"))); + assertThat(result.get("failed"), equalTo(0)); + } catch (ResponseException ex) { + throw new AssertionError(ex); // cause assert busy to retry + } + }); + } else { + // Explicitly flush so we're sure to have a bunch of documents in the Lucene index + assertOK(client().performRequest(new Request("POST", "/_flush"))); + } if (shouldHaveTranslog) { // Update a few documents so we are sure to have a translog indexRandomDocuments(count / 10, false /* Flushing here would invalidate the whole thing....*/, false, diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index 1351de16cf718..809cd40d698df 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -22,7 +22,9 @@ import org.apache.http.entity.StringEntity; import org.elasticsearch.Version; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -239,4 +241,34 @@ public void testRelocationWithConcurrentIndexing() throws Exception { } } + public void testRecoverSyncedFlushIndex() throws Exception { + final String index = "recover_synced_flush_index"; + if (CLUSTER_TYPE == 
ClusterType.OLD) { + Settings.Builder settings = Settings.builder() + .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) + .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1) + // if the node with the replica is the first to be restarted, while a replica is still recovering + // then delayed allocation will kick in. When the node comes back, the master will search for a copy + // but the recovering copy will be seen as invalid and the cluster health won't return to GREEN + // before timing out + .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms") + .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster + createIndex(index, settings.build()); + indexDocs(index, 0, randomInt(5)); + // We have to spin synced-flush requests here because we fire the global checkpoint sync for the last write operation. + // A synced-flush request considers the global checkpoint sync as an going operation because it acquires a shard permit. + assertBusy(() -> { + try { + Response resp = client().performRequest(new Request("POST", index + "/_flush/synced")); + Map result = ObjectPath.createFromResponse(resp).evaluate("_shards"); + assertThat(result.get("successful"), equalTo(result.get("total"))); + assertThat(result.get("failed"), equalTo(0)); + } catch (ResponseException ex) { + throw new AssertionError(ex); // cause assert busy to retry + } + }); + } + ensureGreen(index); + } + } From b6cc6fc2bc44fbde69617566d549829c466cccda Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 22 Jun 2018 18:09:37 -0700 Subject: [PATCH 33/34] [DOCS] Updates Watcher examples for code testing (#31152) --- x-pack/docs/build.gradle | 32 ------------------- x-pack/docs/en/watcher/actions/email.asciidoc | 7 ++-- .../docs/en/watcher/actions/hipchat.asciidoc | 4 ++- x-pack/docs/en/watcher/actions/index.asciidoc | 1 + x-pack/docs/en/watcher/actions/jira.asciidoc | 1 + .../docs/en/watcher/actions/logging.asciidoc | 1 + .../en/watcher/actions/pagerduty.asciidoc | 2 ++ x-pack/docs/en/watcher/actions/slack.asciidoc | 3 ++ .../docs/en/watcher/actions/webhook.asciidoc | 4 +++ .../docs/en/watcher/condition/always.asciidoc | 1 + .../watcher/condition/array-compare.asciidoc | 1 + .../en/watcher/condition/compare.asciidoc | 3 ++ .../docs/en/watcher/condition/never.asciidoc | 1 + .../docs/en/watcher/condition/script.asciidoc | 7 ++++ .../en/watcher/customizing-watches.asciidoc | 10 ++++-- .../example-watch-meetupdata.asciidoc | 24 ++++++++++++-- .../en/watcher/how-watcher-works.asciidoc | 7 ++++ x-pack/docs/en/watcher/input/chain.asciidoc | 2 ++ x-pack/docs/en/watcher/input/http.asciidoc | 6 ++++ x-pack/docs/en/watcher/input/search.asciidoc | 5 +++ x-pack/docs/en/watcher/input/simple.asciidoc | 2 ++ x-pack/docs/en/watcher/transform.asciidoc | 1 + .../docs/en/watcher/transform/chain.asciidoc | 1 + .../docs/en/watcher/transform/script.asciidoc | 1 + .../docs/en/watcher/transform/search.asciidoc | 4 +++ .../en/watcher/trigger/schedule/cron.asciidoc | 2 ++ .../watcher/trigger/schedule/daily.asciidoc | 4 +++ .../watcher/trigger/schedule/hourly.asciidoc | 2 ++ .../trigger/schedule/interval.asciidoc | 1 + .../watcher/trigger/schedule/monthly.asciidoc | 3 ++ .../watcher/trigger/schedule/weekly.asciidoc | 3 ++ .../watcher/trigger/schedule/yearly.asciidoc | 3 ++ .../docs/en/watcher/troubleshooting.asciidoc | 2 ++ 33 files changed, 110 insertions(+), 41 deletions(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 912c9965b4d9a..4176cdeb0b7d6 100644 --- a/x-pack/docs/build.gradle +++ 
b/x-pack/docs/build.gradle @@ -14,38 +14,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/security/authorization/run-as-privilege.asciidoc', 'en/security/ccs-clients-integrations/http.asciidoc', 'en/security/authorization/custom-roles-provider.asciidoc', - 'en/watcher/actions/email.asciidoc', - 'en/watcher/actions/hipchat.asciidoc', - 'en/watcher/actions/index.asciidoc', - 'en/watcher/actions/logging.asciidoc', - 'en/watcher/actions/pagerduty.asciidoc', - 'en/watcher/actions/slack.asciidoc', - 'en/watcher/actions/jira.asciidoc', - 'en/watcher/actions/webhook.asciidoc', - 'en/watcher/condition/always.asciidoc', - 'en/watcher/condition/array-compare.asciidoc', - 'en/watcher/condition/compare.asciidoc', - 'en/watcher/condition/never.asciidoc', - 'en/watcher/condition/script.asciidoc', - 'en/watcher/customizing-watches.asciidoc', - 'en/watcher/example-watches/example-watch-meetupdata.asciidoc', - 'en/watcher/how-watcher-works.asciidoc', - 'en/watcher/input/chain.asciidoc', - 'en/watcher/input/http.asciidoc', - 'en/watcher/input/search.asciidoc', - 'en/watcher/input/simple.asciidoc', - 'en/watcher/transform.asciidoc', - 'en/watcher/transform/chain.asciidoc', - 'en/watcher/transform/script.asciidoc', - 'en/watcher/transform/search.asciidoc', - 'en/watcher/trigger/schedule/cron.asciidoc', - 'en/watcher/trigger/schedule/daily.asciidoc', - 'en/watcher/trigger/schedule/hourly.asciidoc', - 'en/watcher/trigger/schedule/interval.asciidoc', - 'en/watcher/trigger/schedule/monthly.asciidoc', - 'en/watcher/trigger/schedule/weekly.asciidoc', - 'en/watcher/trigger/schedule/yearly.asciidoc', - 'en/watcher/troubleshooting.asciidoc', 'en/rest-api/ml/delete-snapshot.asciidoc', 'en/rest-api/ml/get-bucket.asciidoc', 'en/rest-api/ml/get-job-stats.asciidoc', diff --git a/x-pack/docs/en/watcher/actions/email.asciidoc b/x-pack/docs/en/watcher/actions/email.asciidoc index 5e0ee4c451ac6..0da028fcc7b1e 100644 --- a/x-pack/docs/en/watcher/actions/email.asciidoc +++ b/x-pack/docs/en/watcher/actions/email.asciidoc @@ -35,6 +35,7 @@ the watch payload in the email body: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action. <2> The action type is set to `email`. <3> One or more addresses to send the email to. Must be specified in the @@ -92,6 +93,7 @@ killed by firewalls or load balancers inbetween. } } -------------------------------------------------- +// NOTCONSOLE <1> The ID of the attachment, which is used as the file name in the email attachment. <2> The type of the attachment and its specific configuration. @@ -158,9 +160,8 @@ include::{kib-repo-dir}/reporting/watch-example.asciidoc[] include::{kib-repo-dir}/reporting/report-intervals.asciidoc[] -//TODO: RE-ADD LINK: -//For more information, see -//{kibana-ref}/automating-report-generation.html[Automating Report Generation]. +For more information, see +{kibana-ref}/automating-report-generation.html[Automating Report Generation]. [[email-action-attributes]] ==== Email Action Attributes diff --git a/x-pack/docs/en/watcher/actions/hipchat.asciidoc b/x-pack/docs/en/watcher/actions/hipchat.asciidoc index e92c84638dc19..da5b7558c4a58 100644 --- a/x-pack/docs/en/watcher/actions/hipchat.asciidoc +++ b/x-pack/docs/en/watcher/actions/hipchat.asciidoc @@ -37,6 +37,7 @@ attribute is the message itself: } } -------------------------------------------------- +// NOTCONSOLE <1> The name of a HipChat account configured in `elasticsearch.yml`. <2> The message you want to send to HipChat. 
@@ -66,6 +67,7 @@ For example, the following action is configured to send messages to the } } -------------------------------------------------- +// NOTCONSOLE To send messages with a HipChat account that uses the <> profile, you need to specify what room or rooms you want to send the message to. @@ -92,7 +94,7 @@ For example, the following action is configured to send messages to the } } -------------------------------------------------- - +// NOTCONSOLE [[hipchat-action-attributes]] ==== HipChat Action Attributes diff --git a/x-pack/docs/en/watcher/actions/index.asciidoc b/x-pack/docs/en/watcher/actions/index.asciidoc index dd8d76fe549f3..8a31b150f22cb 100644 --- a/x-pack/docs/en/watcher/actions/index.asciidoc +++ b/x-pack/docs/en/watcher/actions/index.asciidoc @@ -22,6 +22,7 @@ The following snippet shows a simple `index` action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action <2> An optional <> to restrict action execution <3> An optional <> to transform the payload and prepare the data that should be indexed diff --git a/x-pack/docs/en/watcher/actions/jira.asciidoc b/x-pack/docs/en/watcher/actions/jira.asciidoc index 4d35fd5163702..dc1afdc93b342 100644 --- a/x-pack/docs/en/watcher/actions/jira.asciidoc +++ b/x-pack/docs/en/watcher/actions/jira.asciidoc @@ -40,6 +40,7 @@ The following snippet shows a simple jira action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The name of a Jira account configured in `elasticsearch.yml`. <2> The key of the Jira project in which the issue will be created. <3> The name of the issue type. diff --git a/x-pack/docs/en/watcher/actions/logging.asciidoc b/x-pack/docs/en/watcher/actions/logging.asciidoc index aa747028f7fa2..a8a4454c377eb 100644 --- a/x-pack/docs/en/watcher/actions/logging.asciidoc +++ b/x-pack/docs/en/watcher/actions/logging.asciidoc @@ -25,6 +25,7 @@ The following snippet shows a simple logging action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action. <2> An optional <> to transform the payload before executing the `logging` action. diff --git a/x-pack/docs/en/watcher/actions/pagerduty.asciidoc b/x-pack/docs/en/watcher/actions/pagerduty.asciidoc index 1a673435a7ce6..1b93a0f219c6d 100644 --- a/x-pack/docs/en/watcher/actions/pagerduty.asciidoc +++ b/x-pack/docs/en/watcher/actions/pagerduty.asciidoc @@ -25,6 +25,7 @@ The following snippet shows a simple PagerDuty action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> Description of the message @@ -59,6 +60,7 @@ payload as well as an array of contexts to the action. } } -------------------------------------------------- +// NOTCONSOLE [[pagerduty-action-attributes]] diff --git a/x-pack/docs/en/watcher/actions/slack.asciidoc b/x-pack/docs/en/watcher/actions/slack.asciidoc index 3dc3c2c04ca73..0753f333dc733 100644 --- a/x-pack/docs/en/watcher/actions/slack.asciidoc +++ b/x-pack/docs/en/watcher/actions/slack.asciidoc @@ -29,6 +29,7 @@ The following snippet shows a simple slack action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The channels and users you want to send the message to. <2> The content of the message. 
@@ -66,6 +67,7 @@ The following snippet shows a standard message attachment: } } -------------------------------------------------- +// NOTCONSOLE [[slack-dynamic-attachment]] @@ -131,6 +133,7 @@ aggregation and the Slack action: } } -------------------------------------------------- +// NOTCONSOLE <1> The list generated by the action's transform. <2> The parameter placeholders refer to attributes in each item of the list generated by the transform. diff --git a/x-pack/docs/en/watcher/actions/webhook.asciidoc b/x-pack/docs/en/watcher/actions/webhook.asciidoc index 806777a406c6f..aabfb17f3b6e5 100644 --- a/x-pack/docs/en/watcher/actions/webhook.asciidoc +++ b/x-pack/docs/en/watcher/actions/webhook.asciidoc @@ -30,6 +30,7 @@ The following snippet shows a simple webhook action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action <2> An optional <> to transform the payload before executing the `webhook` action @@ -65,6 +66,7 @@ For example, the following `webhook` action creates a new issue in GitHub: } } -------------------------------------------------- +// NOTCONSOLE <1> The username and password for the user creating the issue NOTE: By default, both the username and the password are stored in the `.watches` @@ -101,6 +103,7 @@ the values serve as the parameter values: } } -------------------------------------------------- +// NOTCONSOLE <1> The parameter values can contain templated strings. @@ -128,6 +131,7 @@ the values serve as the header values: } } -------------------------------------------------- +// NOTCONSOLE <1> The header values can contain templated strings. diff --git a/x-pack/docs/en/watcher/condition/always.asciidoc b/x-pack/docs/en/watcher/condition/always.asciidoc index 22203018c926e..c2eb37be52c8f 100644 --- a/x-pack/docs/en/watcher/condition/always.asciidoc +++ b/x-pack/docs/en/watcher/condition/always.asciidoc @@ -22,3 +22,4 @@ object: "always" : {} } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/condition/array-compare.asciidoc b/x-pack/docs/en/watcher/condition/array-compare.asciidoc index 48b073e202c34..b413690865e60 100644 --- a/x-pack/docs/en/watcher/condition/array-compare.asciidoc +++ b/x-pack/docs/en/watcher/condition/array-compare.asciidoc @@ -34,6 +34,7 @@ than or equal to 25: } } -------------------------------------------------- +// NOTCONSOLE <1> The path to the array in the execution context that you want to evaluate, specified in dot notation. <2> The path to the field in each array element that you want to evaluate. diff --git a/x-pack/docs/en/watcher/condition/compare.asciidoc b/x-pack/docs/en/watcher/condition/compare.asciidoc index fc30a44bafe49..d58638e6fe472 100644 --- a/x-pack/docs/en/watcher/condition/compare.asciidoc +++ b/x-pack/docs/en/watcher/condition/compare.asciidoc @@ -49,6 +49,7 @@ search result>> is greater than or equal to 5: } } -------------------------------------------------- +// NOTCONSOLE <1> Use dot notation to reference a value in the execution context. <2> Specify a comparison operator and the value you want to compare against. @@ -68,6 +69,7 @@ of the form `<{expression}>`. For example, the following expression returns } } -------------------------------------------------- +// NOTCONSOLE You can also compare two values in the execution context by specifying the compared value as a path of the form of `{{path}}`. 
For example, the following @@ -85,6 +87,7 @@ to the `ctx.payload.aggregations.handled.buckets.true.doc_count`: } } -------------------------------------------------- +// NOTCONSOLE ==== Accessing Values in the Execution Context diff --git a/x-pack/docs/en/watcher/condition/never.asciidoc b/x-pack/docs/en/watcher/condition/never.asciidoc index d3d5cf39a4465..b8cad0b8c04d5 100644 --- a/x-pack/docs/en/watcher/condition/never.asciidoc +++ b/x-pack/docs/en/watcher/condition/never.asciidoc @@ -17,3 +17,4 @@ you specify the condition type and associate it with an empty object: "never" : {} } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/condition/script.asciidoc b/x-pack/docs/en/watcher/condition/script.asciidoc index 98cdd974f0ce4..ee6a9531bf7b3 100644 --- a/x-pack/docs/en/watcher/condition/script.asciidoc +++ b/x-pack/docs/en/watcher/condition/script.asciidoc @@ -19,6 +19,7 @@ The following snippet configures an inline `script` condition that always return "script" : "return true" } -------------------------------------------------- +// NOTCONSOLE This example defines a script as a simple string. This format is actually a shortcut for defining an <> script. The @@ -43,6 +44,7 @@ parameter, `result`: } } -------------------------------------------------- +// NOTCONSOLE [[condition-script-inline]] ==== Inline Scripts @@ -59,6 +61,7 @@ always returns `true`. } } -------------------------------------------------- +// NOTCONSOLE [[condition-script-stored]] ==== Stored Scripts @@ -74,6 +77,7 @@ in Elasticsearch. The following snippet shows how to refer to a script by its `i } } -------------------------------------------------- +// NOTCONSOLE As with <> scripts, you can also specify the script language and parameters: @@ -88,6 +92,7 @@ scripts, you can also specify the script language and parameters: } } -------------------------------------------------- +// NOTCONSOLE [[accessing-watch-payload]] ==== Accessing the Watch Payload @@ -121,6 +126,7 @@ threshold: } } -------------------------------------------------- +// NOTCONSOLE When you're using a scripted condition to evaluate an Elasticsearch response, keep in mind that the fields in the response are no longer in their native data @@ -132,6 +138,7 @@ you need to parse the `@timestamp` string into a `DateTime`. For example: -------------------------------------------------- org.elasticsearch.common.joda.time.DateTime.parse(@timestamp) -------------------------------------------------- +// NOTCONSOLE You can reference the following variables in the watch context: diff --git a/x-pack/docs/en/watcher/customizing-watches.asciidoc b/x-pack/docs/en/watcher/customizing-watches.asciidoc index 66204a6d0f5b9..fc45bc636bfc5 100644 --- a/x-pack/docs/en/watcher/customizing-watches.asciidoc +++ b/x-pack/docs/en/watcher/customizing-watches.asciidoc @@ -36,6 +36,7 @@ fields in the payload: } } ------------------------------------- +// NOTCONSOLE See <> for more details. @@ -74,6 +75,7 @@ For example, the following `search` input loads the latest VIX quote: } } -------------------------------------------------- +// NOTCONSOLE <1> Will resolve to today's daily quotes index See <> for more details. @@ -105,7 +107,7 @@ Amsterdam using http://openweathermap.org/appid[OpenWeatherMap] online service: } } -------------------------------------------------- - +// NOTCONSOLE See <> for more details. 
[[chaining-inputs]] @@ -146,7 +148,7 @@ returned any hits: "compare" : { "ctx.payload.hits.total" : { "gt" : 0 }} }, -------------------------------------------------- - +// NOTCONSOLE See <> for more details. ==== Powerful Comparison Logic with the Script Condition @@ -176,7 +178,7 @@ VIX quote loaded by the `http` input is either greater than 5% or lower than -5% } } -------------------------------------------------- - +// NOTCONSOLE See <> for more details. [[using-transforms]] @@ -231,6 +233,7 @@ attaches the payload data to the message: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action <2> The action type, in this case it's an `email` action @@ -261,6 +264,7 @@ creates a new issue in GitHub } } -------------------------------------------------- +// NOTCONSOLE <1> `` is the owner of the GitHub repo and `` is the name of the repo. <2> The username that creates the issue <3> The password of that user diff --git a/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc b/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc index 041a8ec81a7e3..d933a38d7d670 100644 --- a/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc +++ b/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc @@ -13,6 +13,7 @@ To ingest this data with Logstash: . Create a Logstash configuration file that uses the {logstash-ref}/plugins-inputs-stdin.html[Logstash standard input] and the {logstash-ref}/plugins-outputs-stdout.html[Logstash standard output] and save it in `logstash-{version}` directory as `livestream.conf`: + +-- [source,ruby] ---------------------------------------------------------- input { @@ -38,16 +39,20 @@ output { <2> } ---------------------------------------------------------- +// NOTCONSOLE <1> The meetup data stream is formatted in JSON. <2> Index the meetup data into Elasticsearch. +-- . To start indexing the meetup data, pipe the RSVP stream into Logstash and specify your `livestream.conf` configuration file. + -[source,she] +-- +[source,shell] ---------------------------------------------------------- - curl http://stream.meetup.com/2/rsvps | bin/logstash -f livestream.conf --------------------------------------------------------- +// NOTCONSOLE +-- Now that you're indexing the meetup RSVPs, you can set up a watch that lets you know about events you might be interested in. For example, let's create a watch that runs every hour, looks for events that talk about about _Open Source_, and sends an email with information about the events. @@ -56,6 +61,7 @@ To set up the watch: . Specify how often you want to run the watch by adding a schedule trigger to the watch: + +-- [source,js] -------------------------------------------------- { @@ -65,8 +71,11 @@ To set up the watch: } }, -------------------------------------------------- +// NOTCONSOLE +-- . Load data into the watch payload by creating an input that searches the meetup data for events that have _Open Source_ as a topic. You can use aggregations to group the data by city, consolidate references to the same events, and sort the events by date. + +-- [source,js] ------------------------------------------------- "input": { @@ -135,19 +144,28 @@ To set up the watch: } }, -------------------------------------------------- +// NOTCONSOLE <1> Elasticsearch Date math is used to select the Logstash indices that contain the meetup data. The second pattern is needed in case the previous hour crosses days. 
<2> Find all of the RSVPs with `Open Source` as a topic. <3> Group the RSVPs by city. <4> Consolidate multiple RSVPs for the same event. <5> Sort the events so the latest events are listed first. <6> Group the events by name. +-- . To determine whether or not there are any Open Source events, add a compare condition that checks the watch payload to see if there were any search hits. ++ +-- [source,js] -------------------------------------------------- "compare" : { "ctx.payload.hits.total" : { "gt" : 0 }} -------------------------------------------------- +// NOTCONSOLE +-- + . To send an email when _Open Source_ events are found, add an email action: ++ +-- [source,js] -------------------------------------------------- "actions": { @@ -167,6 +185,8 @@ To set up the watch: } } --------------------------------------------------- +// NOTCONSOLE +-- NOTE: To enable Watcher to send emails, you must configure an email account in `elasticsearch.yml`. For more information, see <>. diff --git a/x-pack/docs/en/watcher/how-watcher-works.asciidoc b/x-pack/docs/en/watcher/how-watcher-works.asciidoc index b47b83dbf1ede..2bd19c1a41e02 100644 --- a/x-pack/docs/en/watcher/how-watcher-works.asciidoc +++ b/x-pack/docs/en/watcher/how-watcher-works.asciidoc @@ -283,6 +283,7 @@ The following snippet shows the basic structure of the _Watch Execution Context_ "vars" : { ... } <6> } ---------------------------------------------------------------------- +// NOTCONSOLE <1> Any static metadata specified in the watch definition. <2> The current watch payload. <3> The id of the executing watch. @@ -348,6 +349,7 @@ in sent emails: } } ---------------------------------------------------------------------- +// NOTCONSOLE [float] [[inline-templates-scripts]] @@ -369,6 +371,7 @@ the context metadata. } } ---------------------------------------------------------------------- +// NOTCONSOLE For a script, you simply specify the inline script as the value of the `script` field. For example: @@ -379,6 +382,7 @@ field. For example: "script" : "return true" } ---------------------------------------------------------------------- +// NOTCONSOLE You can also explicitly specify the inline type by using a formal object definition as the field value. For example: @@ -395,6 +399,7 @@ definition as the field value. For example: } } ---------------------------------------------------------------------- +// NOTCONSOLE The formal object definition for a script would be: @@ -406,6 +411,7 @@ The formal object definition for a script would be: } } ---------------------------------------------------------------------- +// NOTCONSOLE [float] [[stored-templates-scripts]] @@ -436,3 +442,4 @@ references the `email_notification_subject` template: } } ---------------------------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/input/chain.asciidoc b/x-pack/docs/en/watcher/input/chain.asciidoc index 1984b60d45e20..9952773e7227a 100644 --- a/x-pack/docs/en/watcher/input/chain.asciidoc +++ b/x-pack/docs/en/watcher/input/chain.asciidoc @@ -38,6 +38,7 @@ path set by a `simple` input: } } -------------------------------------------------- +// NOTCONSOLE <1> The inputs in a chain are specified as an array to guarantee the order in which the inputs are processed. (JSON does not guarantee the order of arbitrary objects.) @@ -90,3 +91,4 @@ still be available in its original form in `ctx.payload.first`. 
} } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/input/http.asciidoc b/x-pack/docs/en/watcher/input/http.asciidoc index 451903777d159..79d37d14a1bf4 100644 --- a/x-pack/docs/en/watcher/input/http.asciidoc +++ b/x-pack/docs/en/watcher/input/http.asciidoc @@ -40,6 +40,7 @@ index: } } -------------------------------------------------- +// NOTCONSOLE You can use the full Elasticsearch {ref}/query-dsl.html[Query DSL] to perform more sophisticated searches. For example, the following `http` input retrieves @@ -58,6 +59,7 @@ all documents that contain `event` in the `category` field: } } -------------------------------------------------- +// NOTCONSOLE ==== Calling Elasticsearch APIs @@ -82,6 +84,7 @@ Stats] API and enables the `human` attribute: } } -------------------------------------------------- +// NOTCONSOLE <1> Enabling this attribute returns the `bytes` values in the response in human readable format. @@ -110,6 +113,7 @@ a username and password to access `myservice`: } } -------------------------------------------------- +// NOTCONSOLE You can also pass in service-specific API keys and other information through the `params` attribute. For example, the following `http` @@ -131,6 +135,7 @@ http://openweathermap.org/appid[OpenWeatherMap] service: } } -------------------------------------------------- +// NOTCONSOLE ==== Using Templates @@ -153,6 +158,7 @@ and restrict the results to documents added within the last five minutes: } } -------------------------------------------------- +// NOTCONSOLE ==== Accessing the HTTP Response diff --git a/x-pack/docs/en/watcher/input/search.asciidoc b/x-pack/docs/en/watcher/input/search.asciidoc index a9782c482bd37..7ce67bfc1dc2b 100644 --- a/x-pack/docs/en/watcher/input/search.asciidoc +++ b/x-pack/docs/en/watcher/input/search.asciidoc @@ -32,6 +32,7 @@ documents from the `logs` index: } } -------------------------------------------------- +// NOTCONSOLE You can use date math and wildcards when specifying indices. For example, the following input loads the latest VIXZ quote from today's daily quotes index: @@ -57,6 +58,7 @@ the following input loads the latest VIXZ quote from today's daily quotes index: } } -------------------------------------------------- +// NOTCONSOLE ==== Extracting Specific Fields @@ -78,6 +80,7 @@ watch payload: } }, -------------------------------------------------- +// NOTCONSOLE ==== Using Templates @@ -105,6 +108,7 @@ parameter: ... } -------------------------------------------------- +// NOTCONSOLE ==== Applying Conditions @@ -131,6 +135,7 @@ check if the search returned more than five hits: ... 
} -------------------------------------------------- +// NOTCONSOLE ==== Accessing the Search Results diff --git a/x-pack/docs/en/watcher/input/simple.asciidoc b/x-pack/docs/en/watcher/input/simple.asciidoc index 3b7b4c5734c57..c756a4e5403e2 100644 --- a/x-pack/docs/en/watcher/input/simple.asciidoc +++ b/x-pack/docs/en/watcher/input/simple.asciidoc @@ -20,6 +20,7 @@ an object (`obj`): } } -------------------------------------------------- +// NOTCONSOLE For example, the following watch uses the `simple` input to set the recipient name for a daily reminder email: @@ -48,3 +49,4 @@ name for a daily reminder email: } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/transform.asciidoc b/x-pack/docs/en/watcher/transform.asciidoc index 0351c9b8c1214..8241d7b0cb442 100644 --- a/x-pack/docs/en/watcher/transform.asciidoc +++ b/x-pack/docs/en/watcher/transform.asciidoc @@ -52,6 +52,7 @@ part of the definition of the `my_webhook` action. ] } -------------------------------------------------- +// NOTCONSOLE <1> A watch level `transform` <2> An action level `transform` diff --git a/x-pack/docs/en/watcher/transform/chain.asciidoc b/x-pack/docs/en/watcher/transform/chain.asciidoc index f17b05c71b4cc..9ad27fe48ed81 100644 --- a/x-pack/docs/en/watcher/transform/chain.asciidoc +++ b/x-pack/docs/en/watcher/transform/chain.asciidoc @@ -33,6 +33,7 @@ following snippet: ] } -------------------------------------------------- +// NOTCONSOLE <1> The `chain` transform definition <2> The first transform in the chain (in this case, a `search` transform) <3> The second and final transform in the chain (in this case, a `script` diff --git a/x-pack/docs/en/watcher/transform/script.asciidoc b/x-pack/docs/en/watcher/transform/script.asciidoc index 0a3bd401dc744..f1a46d482d9e6 100644 --- a/x-pack/docs/en/watcher/transform/script.asciidoc +++ b/x-pack/docs/en/watcher/transform/script.asciidoc @@ -20,6 +20,7 @@ TIP: The `script` transform is often useful when used in combination with the } } -------------------------------------------------- +// NOTCONSOLE <1> A simple `painless` script that creates a new payload with a single `time` field holding the scheduled time. diff --git a/x-pack/docs/en/watcher/transform/search.asciidoc b/x-pack/docs/en/watcher/transform/search.asciidoc index eaf7c80c6cbb3..56f9304d986ce 100644 --- a/x-pack/docs/en/watcher/transform/search.asciidoc +++ b/x-pack/docs/en/watcher/transform/search.asciidoc @@ -18,6 +18,7 @@ defined on the watch level: } } -------------------------------------------------- +// NOTCONSOLE Like every other search based construct, one can make use of the full search API supported by Elasticsearch. For example, the following search transform @@ -41,6 +42,7 @@ execute a search over all events indices, matching events with `error` priority: } } -------------------------------------------------- +// NOTCONSOLE The following table lists all available settings for the search transform: @@ -129,6 +131,7 @@ time of the watch: } } -------------------------------------------------- +// NOTCONSOLE The model of the template is a union between the provided `template.params` settings and the <>. 
@@ -173,3 +176,4 @@ The following is an example of using templates that refer to provided parameters } } -------------------------------------------------- +// NOTCONSOLE diff --git a/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc index 57d330510971d..57a6ebdfd92ef 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc @@ -184,6 +184,7 @@ that triggers every day at noon: ... } -------------------------------------------------- +// NOTCONSOLE ===== Configuring a Multiple Times Cron Schedule @@ -207,6 +208,7 @@ minute during the weekend: ... } -------------------------------------------------- +// NOTCONSOLE [[croneval]] ===== Verifying Cron Expressions diff --git a/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc index e3165695e6aa8..e729335d59b29 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc @@ -28,6 +28,7 @@ day at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE ===== Configuring a Multiple Times Daily Schedule @@ -45,6 +46,7 @@ triggers at `00:00`, `12:00`, and `17:00` every day. } } -------------------------------------------------- +// NOTCONSOLE [[specifying-times-using-objects]] ===== Specifying Times Using Objects @@ -69,6 +71,7 @@ For example, the following `daily` schedule triggers once every day at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE To specify multiple times using the object notation, you specify multiple hours or minutes as an array. For example, following `daily` schedule triggers at @@ -89,3 +92,4 @@ or minutes as an array. 
For example, following `daily` schedule triggers at } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc index 48cc9dc2aa4a8..9ec750eebcd2b 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc @@ -28,6 +28,7 @@ For example, the following `hourly` schedule triggers at minute 30 every hour-- } } -------------------------------------------------- +// NOTCONSOLE ===== Configuring a Multiple Times Hourly Schedule @@ -46,3 +47,4 @@ triggers every 15 minutes every hour--`12:00`, `12:15`, `12:30`, `12:45`, } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc index b65c16646e176..e534181ec0c2f 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc @@ -34,3 +34,4 @@ For example, the following `interval` schedule triggers every five minutes: } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc index e6bf292d91811..d2cfe409992a7 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc @@ -26,6 +26,7 @@ on the 10th of each month at noon: } } -------------------------------------------------- +// NOTCONSOLE NOTE: You can also specify the day and time with the `day` and `time` attributes, they are interchangeable with `on` and `at`. @@ -50,6 +51,7 @@ schedule triggers at 12:00 PM on the 10th of each month and at 5:00 PM on the } } -------------------------------------------------- +// NOTCONSOLE Alternatively, you can specify days and times in an object that has `on` and `at` attributes that contain an array of values. For example, the following `monthly` @@ -68,3 +70,4 @@ schedule triggers at 12:00 AM and 12:00 PM on the 10th and 20th of each month. } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc index a5ac52d0e0d01..d6a403cb125c6 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc @@ -32,6 +32,7 @@ triggers once a week on Friday at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE NOTE: You can also specify the day and time with the `day` and `time` attributes, they are interchangeable with `on` and `at`. @@ -55,6 +56,7 @@ schedule triggers every Tuesday at 12:00 PM and every Friday at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE Alternatively, you can specify days and times in an object that has `on` and `minute` attributes that contain an array of values. 
For example, the following @@ -73,3 +75,4 @@ Alternatively, you can specify days and times in an object that has `on` and } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc index 9ea9e1d1b47bc..d11cc5d072787 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc @@ -37,6 +37,7 @@ example, the following `yearly` schedule triggers once a year at noon on January } } -------------------------------------------------- +// NOTCONSOLE NOTE: You can also specify the month, day, and time with the `month`, `day`, and `time` attributes, they are interchangeable with `in`, `on`, and `at`. @@ -61,6 +62,7 @@ on July 20th. } } -------------------------------------------------- +// NOTCONSOLE Alternatively, you can specify the months, days, and times in an object that has `in`, `on`, and `minute` attributes that contain an array of values. For example, @@ -81,3 +83,4 @@ January 20th, December 10th, and December 20th. } } -------------------------------------------------- +// NOTCONSOLE diff --git a/x-pack/docs/en/watcher/troubleshooting.asciidoc b/x-pack/docs/en/watcher/troubleshooting.asciidoc index 8b793142ecc2b..20d599f8f5215 100644 --- a/x-pack/docs/en/watcher/troubleshooting.asciidoc +++ b/x-pack/docs/en/watcher/troubleshooting.asciidoc @@ -30,6 +30,8 @@ mappings: -------------------------------------------------- DELETE .watches -------------------------------------------------- +// CONSOLE +// TEST[skip:index deletion] + . Disable direct access to the `.watches` index: .. Stop the Elasticsearch node. From b7ef75fed6d75e1a88b000e36ee78a018c0c53d0 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Sat, 23 Jun 2018 09:39:17 +0200 Subject: [PATCH 34/34] Add get field mappings to High Level REST API Client (#31423) Add get field mappings to High Level REST API Client Relates to #27205 --- .../elasticsearch/client/IndicesClient.java | 31 +++++ .../client/RequestConverters.java | 20 +++ .../elasticsearch/client/IndicesClientIT.java | 38 ++++++ .../client/RequestConvertersTests.java | 64 ++++++++- .../IndicesClientDocumentationIT.java | 106 +++++++++++++++ .../indices/get_field_mappings.asciidoc | 86 ++++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../mapping/get/GetFieldMappingsResponse.java | 123 +++++++++++++++++- .../get/GetFieldMappingsResponseTests.java | 100 +++++++++++++- 9 files changed, 562 insertions(+), 8 deletions(-) create mode 100644 docs/java-rest/high-level/indices/get_field_mappings.asciidoc diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 30a42eb333f4a..28a9cc2036673 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
@@ -188,6 +190,35 @@ public void getMappingsAsync(GetMappingsRequest getMappingsRequest, RequestOptio
             GetMappingsResponse::fromXContent, listener, emptySet());
     }
 
+    /**
+     * Retrieves the field mappings on an index or indices using the Get Field Mapping API.
+     * See
+     * Get Field Mapping API on elastic.co
+     * @param getFieldMappingsRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public GetFieldMappingsResponse getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest,
+                                                    RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(getFieldMappingsRequest, RequestConverters::getFieldMapping, options,
+            GetFieldMappingsResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Asynchronously retrieves the field mappings on an index or indices using the Get Field Mapping API.
+     * See
+     * Get Field Mapping API on elastic.co
+     * @param getFieldMappingsRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void getFieldMappingAsync(GetFieldMappingsRequest getFieldMappingsRequest, RequestOptions options,
+                                     ActionListener<GetFieldMappingsResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(getFieldMappingsRequest, RequestConverters::getFieldMapping, options,
+            GetFieldMappingsResponse::fromXContent, listener, emptySet());
+    }
+
     /**
      * Updates aliases using the Index Aliases API.
      * See
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index 3d5d275732a6c..63a0e0e98377e 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -50,6 +50,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
 import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
 import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
+import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
@@ -230,6 +231,25 @@ static Request getMappings(GetMappingsRequest getMappingsRequest) throws IOExcep
         return request;
     }
 
+    static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) throws IOException {
+        String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices();
+        String[] types = getFieldMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.types();
+        String[] fields = getFieldMappingsRequest.fields() == null ?
Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); + + String endpoint = new EndpointBuilder().addCommaSeparatedPathParts(indices) + .addPathPartAsIs("_mapping").addCommaSeparatedPathParts(types) + .addPathPartAsIs("field").addCommaSeparatedPathParts(fields) + .build(); + + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + Params parameters = new Params(request); + parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); + parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); + parameters.withLocal(getFieldMappingsRequest.local()); + return request; + } + static Request refresh(RefreshRequest refreshRequest) { String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_refresh")); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index c226b5349267c..5f8e6b5d36526 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -43,6 +43,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -74,6 +76,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -378,6 +381,41 @@ public void testGetMapping() throws IOException { assertThat(mappings, equalTo(expected)); } + public void testGetFieldMapping() throws IOException { + String indexName = "test"; + createIndex(indexName, Settings.EMPTY); + + PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); + putMappingRequest.type("_doc"); + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + mappingBuilder.startObject().startObject("properties").startObject("field"); + mappingBuilder.field("type", "text"); + mappingBuilder.endObject().endObject().endObject(); + putMappingRequest.source(mappingBuilder); + + PutMappingResponse putMappingResponse = + execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync); + assertTrue(putMappingResponse.isAcknowledged()); + + GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest() + .indices(indexName) + .types("_doc") + .fields("field"); + + GetFieldMappingsResponse getFieldMappingsResponse = + execute(getFieldMappingsRequest, + highLevelClient().indices()::getFieldMapping, + highLevelClient().indices()::getFieldMappingAsync); + + final Map fieldMappingMap = + 
getFieldMappingsResponse.mappings().get(indexName).get("_doc"); + + final GetFieldMappingsResponse.FieldMappingMetaData metaData = + new GetFieldMappingsResponse.FieldMappingMetaData("field", + new BytesArray("{\"field\":{\"type\":\"text\"}}")); + assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metaData))); + } + public void testDeleteIndex() throws IOException { { // Delete index if exists diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index e8bbbf6f5fd0c..b8714967b412a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; @@ -457,6 +458,61 @@ public void testGetMapping() throws IOException { assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); } + public void testGetFieldMapping() throws IOException { + GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest(); + + String[] indices = Strings.EMPTY_ARRAY; + if (randomBoolean()) { + indices = randomIndicesNames(0, 5); + getFieldMappingsRequest.indices(indices); + } else if (randomBoolean()) { + getFieldMappingsRequest.indices((String[]) null); + } + + String type = null; + if (randomBoolean()) { + type = randomAlphaOfLengthBetween(3, 10); + getFieldMappingsRequest.types(type); + } else if (randomBoolean()) { + getFieldMappingsRequest.types((String[]) null); + } + + String[] fields = null; + if (randomBoolean()) { + fields = new String[randomIntBetween(1, 5)]; + for (int i = 0; i < fields.length; i++) { + fields[i] = randomAlphaOfLengthBetween(3, 10); + } + getFieldMappingsRequest.fields(fields); + } else if (randomBoolean()) { + getFieldMappingsRequest.fields((String[]) null); + } + + Map expectedParams = new HashMap<>(); + + setRandomIndicesOptions(getFieldMappingsRequest::indicesOptions, getFieldMappingsRequest::indicesOptions, expectedParams); + setRandomLocal(getFieldMappingsRequest::local, expectedParams); + + Request request = RequestConverters.getFieldMapping(getFieldMappingsRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String index = String.join(",", indices); + if (Strings.hasLength(index)) { + endpoint.add(index); + } + endpoint.add("_mapping"); + if (type != null) { + endpoint.add(type); + } + endpoint.add("field"); + if (fields != null) { + endpoint.add(String.join(",", fields)); + } + assertThat(endpoint.toString(), equalTo(request.getEndpoint())); + + assertThat(expectedParams, equalTo(request.getParameters())); + assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + } + public void testDeleteIndex() { String[] indices = randomIndicesNames(0, 5); DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indices); @@ -2268,16 +2324,20 @@ private static void setRandomHumanReadable(GetIndexRequest request, Map request, 
Map expectedParams) { + private static void setRandomLocal(Consumer setter, Map expectedParams) { if (randomBoolean()) { boolean local = randomBoolean(); - request.local(local); + setter.accept(local); if (local) { expectedParams.put("local", String.valueOf(local)); } } } + private static void setRandomLocal(MasterNodeReadRequest request, Map expectedParams) { + setRandomLocal(request::local, expectedParams); + } + private static void setRandomTimeout(Consumer setter, TimeValue defaultTimeout, Map expectedParams) { if (randomBoolean()) { String timeout = randomTimeValue(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 9cc28152d03e3..95fa7f7185b5b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -41,6 +41,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -703,6 +705,110 @@ public void onFailure(Exception e) { } } + public void testGetFieldMapping() throws IOException, InterruptedException { + RestHighLevelClient client = highLevelClient(); + + { + CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter"), RequestOptions.DEFAULT); + assertTrue(createIndexResponse.isAcknowledged()); + PutMappingRequest request = new PutMappingRequest("twitter"); + request.type("tweet"); + request.source( + "{\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"timestamp\": {\n" + + " \"type\": \"date\"\n" + + " }\n" + + " }\n" + + "}", // <1> + XContentType.JSON); + PutMappingResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT); + assertTrue(putMappingResponse.isAcknowledged()); + } + + // tag::get-field-mapping-request + GetFieldMappingsRequest request = new GetFieldMappingsRequest(); // <1> + request.indices("twitter"); // <2> + request.types("tweet"); // <3> + request.fields("message", "timestamp"); // <4> + // end::get-field-mapping-request + + // tag::get-field-mapping-request-indicesOptions + request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1> + // end::get-field-mapping-request-indicesOptions + + // tag::get-field-mapping-request-local + request.local(true); // <1> + // end::get-field-mapping-request-local + + { + + // tag::get-field-mapping-execute + GetFieldMappingsResponse response = + client.indices().getFieldMapping(request, RequestOptions.DEFAULT); + // end::get-field-mapping-execute + + // tag::get-field-mapping-response + final Map>> mappings = + response.mappings();// <1> + final Map typeMappings = + mappings.get("twitter").get("tweet"); // <2> + final GetFieldMappingsResponse.FieldMappingMetaData 
metaData =
+                typeMappings.get("message");// <3>
+
+            final String fullName = metaData.fullName();// <4>
+            final Map<String, Object> source = metaData.sourceAsMap(); // <5>
+            // end::get-field-mapping-response
+        }
+
+        {
+            // tag::get-field-mapping-execute-listener
+            ActionListener<GetFieldMappingsResponse> listener =
+                new ActionListener<GetFieldMappingsResponse>() {
+                    @Override
+                    public void onResponse(GetFieldMappingsResponse response) {
+                        // <1>
+                    }
+
+                    @Override
+                    public void onFailure(Exception e) {
+                        // <2>
+                    }
+                };
+            // end::get-field-mapping-execute-listener
+
+            // Replace the empty listener by a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            final ActionListener<GetFieldMappingsResponse> latchListener = new LatchedActionListener<>(listener, latch);
+            listener = ActionListener.wrap(r -> {
+                final Map<String, Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetaData>>> mappings =
+                    r.mappings();
+                final Map<String, GetFieldMappingsResponse.FieldMappingMetaData> typeMappings =
+                    mappings.get("twitter").get("tweet");
+                final GetFieldMappingsResponse.FieldMappingMetaData metaData1 = typeMappings.get("message");
+
+                final String fullName = metaData1.fullName();
+                final Map<String, Object> source = metaData1.sourceAsMap();
+                latchListener.onResponse(r);
+            }, e -> {
+                latchListener.onFailure(e);
+                fail("should not fail");
+            });
+
+            // tag::get-field-mapping-execute-async
+            client.indices().getFieldMappingAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            // end::get-field-mapping-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+
+
+    }
+
+
     public void testOpenIndex() throws Exception {
         RestHighLevelClient client = highLevelClient();
diff --git a/docs/java-rest/high-level/indices/get_field_mappings.asciidoc b/docs/java-rest/high-level/indices/get_field_mappings.asciidoc
new file mode 100644
index 0000000000000..3f5ff5aec6449
--- /dev/null
+++ b/docs/java-rest/high-level/indices/get_field_mappings.asciidoc
@@ -0,0 +1,86 @@
+[[java-rest-high-get-field-mappings]]
+=== Get Field Mappings API
+
+[[java-rest-high-get-field-mappings-request]]
+==== Get Field Mappings Request
+
+A `GetFieldMappingsRequest` can have an optional list of indices, an optional list of types, and the list of fields to return:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-request]
+--------------------------------------------------
+<1> An empty request
+<2> Setting the indices to fetch the mappings for
+<3> The types to be returned
+<4> The fields to be returned
+
+==== Optional arguments
+The following arguments can also optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-request-indicesOptions]
+--------------------------------------------------
+<1> Setting `IndicesOptions` controls how unavailable indices are resolved and
+how wildcard expressions are expanded
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-request-local]
+--------------------------------------------------
+<1> The `local` flag (defaults to `false`) controls whether the field mappings need
+to be looked up in the local cluster state or in the cluster state held by
+the elected master node
+
+[[java-rest-high-get-field-mappings-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-execute]
+--------------------------------------------------
+
+[[java-rest-high-get-field-mapping-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a get field mappings request requires both the
+`GetFieldMappingsRequest` instance and an `ActionListener` instance to be passed to
+the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-execute-async]
+--------------------------------------------------
+<1> The `GetFieldMappingsRequest` to execute and the `ActionListener` to use when the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method if
+the execution successfully completed or using the `onFailure` method if it
+failed.
+
+A typical listener for `GetFieldMappingsResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is provided as an argument
+<2> Called in case of failure. The raised exception is provided as an argument
+
+[[java-rest-high-get-field-mapping-response]]
+==== Get Field Mappings Response
+
+The returned `GetFieldMappingsResponse` allows you to retrieve information about the
+executed operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-response]
+--------------------------------------------------
+<1> Returning all requested indices' field mappings
+<2> Retrieving the mappings for a particular index and type
+<3> Getting the mappings metadata for the `message` field
+<4> Getting the full name of the field
+<5> Getting the mapping source of the field
+
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index 418eb528f8e00..9ed54db817551 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -77,6 +77,7 @@ Index Management::
 
 Mapping Management::
 * <>
+* <<java-rest-high-get-field-mappings>>
 
 Alias Management::
 * <>
@@ -98,6 +99,7 @@ include::indices/force_merge.asciidoc[]
 include::indices/rollover.asciidoc[]
 include::indices/put_mapping.asciidoc[]
 include::indices/get_mappings.asciidoc[]
+include::indices/get_field_mappings.asciidoc[]
 include::indices/update_aliases.asciidoc[]
 include::indices/exists_alias.asciidoc[]
 include::indices/get_alias.asciidoc[]
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java
index d837c1cbd199b..81b9812d61c5f 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java
@@ -20,13 +20,17 @@
 package org.elasticsearch.action.admin.indices.mapping.get;
 
 import org.elasticsearch.action.ActionResponse;
+import
org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.Mapper; @@ -34,13 +38,45 @@ import java.io.InputStream; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; /** Response object for {@link GetFieldMappingsRequest} API */ public class GetFieldMappingsResponse extends ActionResponse implements ToXContentFragment { + private static final ParseField MAPPINGS = new ParseField("mappings"); + + private static final ObjectParser>, String> PARSER = + new ObjectParser<>(MAPPINGS.getPreferredName(), true, HashMap::new); + + static { + PARSER.declareField((p, typeMappings, index) -> { + p.nextToken(); + while (p.currentToken() == XContentParser.Token.FIELD_NAME) { + final String typeName = p.currentName(); + + if (p.nextToken() == XContentParser.Token.START_OBJECT) { + final Map typeMapping = new HashMap<>(); + typeMappings.put(typeName, typeMapping); + + while (p.nextToken() == XContentParser.Token.FIELD_NAME) { + final String fieldName = p.currentName(); + final FieldMappingMetaData fieldMappingMetaData = FieldMappingMetaData.fromXContent(p); + typeMapping.put(fieldName, fieldMappingMetaData); + } + } else { + p.skipChildren(); + } + p.nextToken(); + } + }, MAPPINGS, ObjectParser.ValueType.OBJECT); + } + private Map>> mappings = emptyMap(); GetFieldMappingsResponse(Map>> mappings) { @@ -77,7 +113,7 @@ public FieldMappingMetaData fieldMappings(String index, String type, String fiel public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { for (Map.Entry>> indexEntry : mappings.entrySet()) { builder.startObject(indexEntry.getKey()); - builder.startObject("mappings"); + builder.startObject(MAPPINGS.getPreferredName()); for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { builder.startObject(typeEntry.getKey()); for (Map.Entry fieldEntry : typeEntry.getValue().entrySet()) { @@ -93,9 +129,46 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public static GetFieldMappingsResponse fromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + + final Map>> mappings = new HashMap<>(); + if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { + while (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + final String index = parser.currentName(); + + final Map> typeMappings = PARSER.parse(parser, index); + mappings.put(index, typeMappings); + + parser.nextToken(); + } + } + + 
return new GetFieldMappingsResponse(mappings); + } + public static class FieldMappingMetaData implements ToXContentFragment { public static final FieldMappingMetaData NULL = new FieldMappingMetaData("", BytesArray.EMPTY); + private static final ParseField FULL_NAME = new ParseField("full_name"); + private static final ParseField MAPPING = new ParseField("mapping"); + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("field_mapping_meta_data", true, + a -> new FieldMappingMetaData((String)a[0], (BytesReference)a[1]) + ); + + static { + PARSER.declareField(optionalConstructorArg(), + (p, c) -> p.text(), FULL_NAME, ObjectParser.ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), + (p, c) -> { + final XContentBuilder jsonBuilder = jsonBuilder().copyCurrentStructure(p); + final BytesReference bytes = BytesReference.bytes(jsonBuilder); + return bytes; + }, MAPPING, ObjectParser.ValueType.OBJECT); + } + private String fullName; private BytesReference source; @@ -122,18 +195,41 @@ BytesReference getSource() { return source; } + public static FieldMappingMetaData fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("full_name", fullName); + builder.field(FULL_NAME.getPreferredName(), fullName); if (params.paramAsBoolean("pretty", false)) { builder.field("mapping", sourceAsMap()); } else { try (InputStream stream = source.streamInput()) { - builder.rawField("mapping", stream, XContentType.JSON); + builder.rawField(MAPPING.getPreferredName(), stream, XContentType.JSON); } } return builder; } + + @Override + public String toString() { + return "FieldMappingMetaData{fullName='" + fullName + '\'' + ", source=" + source + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof FieldMappingMetaData)) return false; + FieldMappingMetaData that = (FieldMappingMetaData) o; + return Objects.equals(fullName, that.fullName) && + Objects.equals(source, that.source); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, source); + } } @Override @@ -178,4 +274,25 @@ public void writeTo(StreamOutput out) throws IOException { } } } + + @Override + public String toString() { + return "GetFieldMappingsResponse{" + + "mappings=" + mappings + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof GetFieldMappingsResponse)) return false; + GetFieldMappingsResponse that = (GetFieldMappingsResponse) o; + return Objects.equals(mappings, that.mappings); + } + + @Override + public int hashCode() { + return Objects.hash(mappings); + } + } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 4dc396323c048..b6e785a4d05be 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -23,16 +23,22 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.test.ESTestCase; +import 
org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.function.Predicate; -public class GetFieldMappingsResponseTests extends ESTestCase { +import static org.hamcrest.CoreMatchers.equalTo; - public void testSerialization() throws IOException { +public class GetFieldMappingsResponseTests extends AbstractStreamableXContentTestCase { + + public void testManualSerialization() throws IOException { Map>> mappings = new HashMap<>(); FieldMappingMetaData fieldMappingMetaData = new FieldMappingMetaData("my field", new BytesArray("{}")); mappings.put("index", Collections.singletonMap("type", Collections.singletonMap("field", fieldMappingMetaData))); @@ -49,4 +55,92 @@ public void testSerialization() throws IOException { } } } + + public void testManualJunkedJson() throws Exception { + // in fact random fields could be evaluated as proper mapping, while proper junk in this case is arrays and values + final String json = + "{\"index1\":{\"mappings\":" + + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," + // junk here + + "\"junk1\": [\"field1\", {\"field2\":{}}]," + + "\"junk2\": [{\"field3\":{}}]," + + "\"junk3\": 42," + + "\"junk4\": \"Q\"," + + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}," + + "\"index0\":{\"mappings\":" + + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," + + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}}"; + + final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), + LoggingDeprecationHandler.INSTANCE, json.getBytes("UTF-8")); + + final GetFieldMappingsResponse response = GetFieldMappingsResponse.fromXContent(parser); + + FieldMappingMetaData fieldMappingMetaData = + new FieldMappingMetaData("my field", new BytesArray("{\"type\":\"keyword\"}")); + Map fieldMapping = new HashMap<>(); + fieldMapping.put("field0", fieldMappingMetaData); + fieldMapping.put("field1", fieldMappingMetaData); + + Map> typeMapping = new HashMap<>(); + typeMapping.put("doctype0", fieldMapping); + typeMapping.put("doctype1", fieldMapping); + + Map>> mappings = new HashMap<>(); + mappings.put("index0", typeMapping); + mappings.put("index1", typeMapping); + + final Map>> responseMappings = response.mappings(); + assertThat(responseMappings, equalTo(mappings)); + } + + @Override + protected GetFieldMappingsResponse doParseInstance(XContentParser parser) throws IOException { + return GetFieldMappingsResponse.fromXContent(parser); + } + + @Override + protected GetFieldMappingsResponse createBlankInstance() { + return new GetFieldMappingsResponse(); + } + + @Override + protected GetFieldMappingsResponse createTestInstance() { + return new GetFieldMappingsResponse(randomMapping()); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // allow 
random fields at the level of `index` and `index.mappings.doctype.field` + // otherwise random field could be evaluated as index name or type name + return s -> false == (s.matches("(?[^.]+)") + || s.matches("(?[^.]+)\\.mappings\\.(?[^.]+)\\.(?[^.]+)")); + } + + private Map>> randomMapping() { + Map>> mappings = new HashMap<>(); + + int indices = randomInt(10); + for(int i = 0; i < indices; i++) { + final Map> doctypesMappings = new HashMap<>(); + int doctypes = randomInt(10); + for(int j = 0; j < doctypes; j++) { + Map fieldMappings = new HashMap<>(); + int fields = randomInt(10); + for(int k = 0; k < fields; k++) { + final String mapping = randomBoolean() ? "{\"type\":\"string\"}" : "{\"type\":\"keyword\"}"; + FieldMappingMetaData metaData = + new FieldMappingMetaData("my field", new BytesArray(mapping)); + fieldMappings.put("field" + k, metaData); + } + doctypesMappings.put("doctype" + j, fieldMappings); + } + mappings.put("index" + i, doctypesMappings); + } + return mappings; + } }
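The patch above only shows the new client method, request converter, and response parsing in diff form. The following stand-alone sketch is not part of the patch; it simply illustrates how the newly added `getFieldMapping` and `getFieldMappingAsync` methods are expected to be used together. The host, index, type, and field names are assumptions for the example; only the API calls themselves come from the code introduced above.

["source","java"]
--------------------------------------------------
import java.util.Map;

import org.apache.http.HttpHost;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;

public class GetFieldMappingExample {
    public static void main(String[] args) throws Exception {
        // Assumed local cluster address; adjust host and port for your environment.
        try (RestHighLevelClient client =
                 new RestHighLevelClient(RestClient.builder(new HttpHost("localhost", 9200, "http")))) {

            // Request the mapping of the "message" field in the "twitter" index, type "tweet".
            GetFieldMappingsRequest request = new GetFieldMappingsRequest()
                .indices("twitter")
                .types("tweet")
                .fields("message");

            // Synchronous variant added by this patch; getFieldMappingAsync takes an extra ActionListener.
            GetFieldMappingsResponse response =
                client.indices().getFieldMapping(request, RequestOptions.DEFAULT);

            // The response is keyed by index, then type, then field name.
            GetFieldMappingsResponse.FieldMappingMetaData metaData =
                response.mappings().get("twitter").get("tweet").get("message");
            if (metaData != null) {
                String fullName = metaData.fullName();               // e.g. "message"
                Map<String, Object> source = metaData.sourceAsMap(); // e.g. {message={type=text}}
                System.out.println(fullName + " -> " + source);
            }
        }
    }
}
--------------------------------------------------

For non-blocking usage, the same request can be passed to `getFieldMappingAsync` together with an `ActionListener<GetFieldMappingsResponse>`, as shown in the documentation test included in this patch.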