diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index bd80deda89c6f..e7ee5a059ab37 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -30,7 +30,7 @@ buildscript { apply plugin: 'elasticsearch.build' -// order of this seciont matters, see: https://github.com/johnrengelman/shadow/issues/336 +// order of this section matters, see: https://github.com/johnrengelman/shadow/issues/336 apply plugin: 'application' // have the shadow plugin provide the runShadow task mainClassName = 'org.openjdk.jmh.Main' apply plugin: 'com.github.johnrengelman.shadow' // build an uberjar with all benchmarks diff --git a/build.gradle b/build.gradle index 9bb08cf29dbbc..862943b50f5eb 100644 --- a/build.gradle +++ b/build.gradle @@ -326,6 +326,9 @@ gradle.projectsEvaluated { // :test:framework:test cannot run before and after :server:test return } + if (tasks.findByPath('test') != null && tasks.findByPath('integTest') != null) { + integTest.mustRunAfter test + } configurations.all { Configuration configuration -> /* * The featureAwarePlugin configuration has a dependency on x-pack:plugin:core and x-pack:plugin:core has a dependency on the @@ -575,3 +578,28 @@ gradle.projectsEvaluated { } } } + +if (System.properties.get("build.compare") != null) { + apply plugin: 'compare-gradle-builds' + compareGradleBuilds { + ext.referenceProject = System.properties.get("build.compare") + doFirst { + if (file(referenceProject).exists() == false) { + throw new GradleException( + "Use git worktree to check out a version to compare against to ../elasticsearch_build_reference" + ) + } + } + sourceBuild { + gradleVersion = "4.7" // does not default to the gradle wrapper of the project dir, but to the current version + projectDir = referenceProject + tasks = ["clean", "assemble"] + arguments = ["-Dbuild.compare_friendly=true"] + } + targetBuild { + tasks = ["clean", "assemble"] + // use -Dorg.gradle.java.home= to alter jdk versions + arguments = ["-Dbuild.compare_friendly=true"] + } + } +} diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 5256968b6ca3e..9ae86a661cea2 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -17,11 +17,13 @@ * under the License.
*/ -import java.nio.file.Files -import org.gradle.util.GradleVersion +import java.nio.file.Files -apply plugin: 'groovy' +plugins { + id 'java-gradle-plugin' + id 'groovy' +} group = 'org.elasticsearch.gradle' @@ -83,9 +85,10 @@ repositories { } dependencies { - compile gradleApi() compile localGroovy() compile "com.carrotsearch.randomizedtesting:junit4-ant:${props.getProperty('randomizedrunner')}" + compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}" + compile("junit:junit:${props.getProperty('junit')}") { transitive = false } @@ -97,8 +100,10 @@ dependencies { compile 'de.thetaphi:forbiddenapis:2.5' compile 'org.apache.rat:apache-rat:0.11' compile "org.elasticsearch:jna:4.5.1" + testCompile "junit:junit:${props.getProperty('junit')}" } + // Gradle 2.14+ removed ProgressLogger(-Factory) classes from the public APIs // Use logging dependency instead // Gradle 4.3.1 stopped releasing the logging jars to jcenter, just use the last available one @@ -106,7 +111,6 @@ GradleVersion logVersion = GradleVersion.current() > GradleVersion.version('4.3' dependencies { compileOnly "org.gradle:gradle-logging:${logVersion.getVersion()}" - compile 'ru.vyarus:gradle-animalsniffer-plugin:1.2.0' // Gradle 2.14 requires a version > 1.0.1 } /***************************************************************************** @@ -114,14 +118,12 @@ dependencies { *****************************************************************************/ // this will only happen when buildSrc is built on its own during build init if (project == rootProject) { - repositories { if (System.getProperty("repos.mavenLocal") != null) { mavenLocal() } mavenCentral() } - test.exclude 'org/elasticsearch/test/NamingConventionsCheckBadClasses*' } /***************************************************************************** @@ -146,9 +148,6 @@ if (project != rootProject) { jarHell.enabled = false thirdPartyAudit.enabled = false - // test for elasticsearch.build tries to run with ES... 
- test.enabled = false - // TODO: re-enable once randomizedtesting gradle code is published and removed from here licenseHeaders.enabled = false @@ -159,14 +158,7 @@ } namingConventions { - testClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$UnitTestCase' - integTestClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$IntegTestCase' - } - - task namingConventionsMain(type: org.elasticsearch.gradle.precommit.NamingConventionsTask) { - checkForTestsInMain = true - testClass = namingConventions.testClass - integTestClass = namingConventions.integTestClass + testClass = 'org.elasticsearch.gradle.test.GradleUnitTestCase' + integTestClass = 'org.elasticsearch.gradle.test.GradleIntegrationTestCase' } - precommit.dependsOn namingConventionsMain } diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy index c375f773bf9b5..24b66efbcef2c 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy @@ -1,20 +1,44 @@ package com.carrotsearch.gradle.junit4 import com.carrotsearch.ant.tasks.junit4.JUnit4 -import org.gradle.api.AntBuilder +import org.gradle.api.GradleException import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.Task +import org.gradle.api.UnknownTaskException import org.gradle.api.plugins.JavaBasePlugin import org.gradle.api.tasks.TaskContainer +import org.gradle.api.tasks.TaskProvider import org.gradle.api.tasks.testing.Test +import java.util.concurrent.atomic.AtomicBoolean + class RandomizedTestingPlugin implements Plugin { + static private AtomicBoolean sanityCheckConfigured = new AtomicBoolean(false) + void apply(Project project) { setupSeed(project) replaceTestTask(project.tasks) configureAnt(project.ant) + configureSanityCheck(project) + } + + private static void configureSanityCheck(Project project) { + // Check the task graph to confirm tasks were indeed replaced + // https://github.com/elastic/elasticsearch/issues/31324 + if (sanityCheckConfigured.getAndSet(true) == false) { + project.rootProject.getGradle().getTaskGraph().whenReady { + List nonConforming = project.getGradle().getTaskGraph().allTasks + .findAll { it.name == "test" } + .findAll { (it instanceof RandomizedTestingTask) == false} + .collect { "${it.path} -> ${it.class}" } + if (nonConforming.isEmpty() == false) { + throw new GradleException("Found the ${nonConforming.size()} `test` tasks:" + + "\n ${nonConforming.join("\n ")}") + } + } + } } /** @@ -45,29 +69,32 @@ class RandomizedTestingPlugin implements Plugin { } static void replaceTestTask(TaskContainer tasks) { - Test oldTestTask = tasks.findByPath('test') - if (oldTestTask == null) { + // Gradle 4.8 introduced lazy tasks, thus we deal with both the `test` task and its provider + // https://github.com/gradle/gradle/issues/5730#issuecomment-398822153 + // since we can't be sure if the task was ever realized, we remove both the provider and the task + TaskProvider oldTestProvider + try { + oldTestProvider = tasks.getByNameLater(Test, 'test') + } catch (UnknownTaskException unused) { // no test task, ok, user will use testing task on their own return } - tasks.remove(oldTestTask) + Test oldTestTask = oldTestProvider.get() - Map properties = [ - name: 'test', - type: RandomizedTestingTask, - dependsOn:
oldTestTask.dependsOn, - group: JavaBasePlugin.VERIFICATION_GROUP, - description: 'Runs unit tests with the randomized testing framework' - ] - RandomizedTestingTask newTestTask = tasks.create(properties) - newTestTask.classpath = oldTestTask.classpath - newTestTask.testClassesDir = oldTestTask.project.sourceSets.test.output.classesDir - // since gradle 4.5, tasks immutable dependencies are "hidden" (do not show up in dependsOn) - // so we must explicitly add a dependency on generating the test classpath - newTestTask.dependsOn('testClasses') + // we still have to use replace here despite the remove above because the task container knows about the provider + // by the same name + RandomizedTestingTask newTestTask = tasks.replace('test', RandomizedTestingTask) + newTestTask.configure{ + group = JavaBasePlugin.VERIFICATION_GROUP + description = 'Runs unit tests with the randomized testing framework' + dependsOn oldTestTask.dependsOn, 'testClasses' + classpath = oldTestTask.classpath + testClassesDirs = oldTestTask.project.sourceSets.test.output.classesDirs + } // hack so check task depends on custom test - Task checkTask = tasks.findByPath('check') + Task checkTask = tasks.getByName('check') + checkTask.dependsOn.remove(oldTestProvider) checkTask.dependsOn.remove(oldTestTask) checkTask.dependsOn.add(newTestTask) } diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy index 1817ea57e7abe..2b61165608d2d 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy @@ -6,18 +6,20 @@ import groovy.xml.NamespaceBuilder import groovy.xml.NamespaceBuilderSupport import org.apache.tools.ant.BuildException import org.apache.tools.ant.DefaultLogger +import org.apache.tools.ant.Project import org.apache.tools.ant.RuntimeConfigurable import org.apache.tools.ant.UnknownElement +import org.elasticsearch.gradle.BuildPlugin import org.gradle.api.DefaultTask import org.gradle.api.InvalidUserDataException import org.gradle.api.file.FileCollection import org.gradle.api.file.FileTreeElement -import org.gradle.api.internal.tasks.options.Option import org.gradle.api.specs.Spec import org.gradle.api.tasks.Input import org.gradle.api.tasks.InputDirectory import org.gradle.api.tasks.Optional import org.gradle.api.tasks.TaskAction +import org.gradle.api.tasks.options.Option import org.gradle.api.tasks.util.PatternFilterable import org.gradle.api.tasks.util.PatternSet import org.gradle.internal.logging.progress.ProgressLoggerFactory @@ -43,8 +45,8 @@ class RandomizedTestingTask extends DefaultTask { @Input String parallelism = '1' - @InputDirectory - File testClassesDir + @Input + FileCollection testClassesDirs @Optional @Input @@ -220,7 +222,7 @@ class RandomizedTestingTask extends DefaultTask { listener = new DefaultLogger( errorPrintStream: System.err, outputPrintStream: System.out, - messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO) + messageOutputLevel: Project.MSG_INFO) } else { // we want to buffer the info, and emit it if the test fails antLoggingBuffer = new ByteArrayOutputStream() @@ -228,7 +230,7 @@ class RandomizedTestingTask extends DefaultTask { listener = new DefaultLogger( errorPrintStream: stream, outputPrintStream: stream, - messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO) + messageOutputLevel: Project.MSG_INFO) } 
project.ant.project.addBuildListener(listener) } @@ -251,12 +253,10 @@ class RandomizedTestingTask extends DefaultTask { if (argLine != null) { jvmarg(line: argLine) } - fileset(dir: testClassesDir) { - for (String includePattern : patternSet.getIncludes()) { - include(name: includePattern) - } - for (String excludePattern : patternSet.getExcludes()) { - exclude(name: excludePattern) + testClassesDirs.each { testClassDir -> + fileset(dir: testClassDir) { + patternSet.getIncludes().each { include(name: it) } + patternSet.getExcludes().each { exclude(name: it) } } } for (Map.Entry prop : systemProperties) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index eb3cd1dc8c6da..8a2b1b798e163 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -348,7 +348,9 @@ class BuildPlugin implements Plugin { // just a self contained test-fixture configuration, likely transitive and hellacious return } - configuration.resolutionStrategy.failOnVersionConflict() + configuration.resolutionStrategy { + failOnVersionConflict() + } }) // force all dependencies added directly to compile/testCompile to be non-transitive, except for ES itself @@ -475,13 +477,17 @@ class BuildPlugin implements Plugin { } } - project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom t -> - // place the pom next to the jar it is for - t.destination = new File(project.buildDir, "distributions/${project.archivesBaseName}-${project.version}.pom") - // build poms with assemble (if the assemble task exists) - Task assemble = project.tasks.findByName('assemble') - if (assemble) { - assemble.dependsOn(t) + // Work around Gradle 4.8 issue until we `enableFeaturePreview('STABLE_PUBLISHING')` + // https://github.com/gradle/gradle/issues/5696#issuecomment-396965185 + project.getGradle().getTaskGraph().whenReady { + project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom t -> + // place the pom next to the jar it is for + t.destination = new File(project.buildDir, "distributions/${project.archivesBaseName}-${project.version}.pom") + // build poms with assemble (if the assemble task exists) + Task assemble = project.tasks.findByName('assemble') + if (assemble) { + assemble.dependsOn(t) + } } } } @@ -625,6 +631,10 @@ class BuildPlugin implements Plugin { jarTask.manifest.attributes('Change': shortHash) } } + // Force manifest entries that change by nature to a constant to be able to compare builds more effectively + if (System.properties.getProperty("build.compare_friendly", "false") == "true") { + jarTask.manifest.getAttributes().clear() + } } // add license/notice files project.afterEvaluate { @@ -741,7 +751,7 @@ class BuildPlugin implements Plugin { project.extensions.add('additionalTest', { String name, Closure config -> RandomizedTestingTask additionalTest = project.tasks.create(name, RandomizedTestingTask.class) additionalTest.classpath = test.classpath - additionalTest.testClassesDir = test.testClassesDir + additionalTest.testClassesDirs = test.testClassesDirs additionalTest.configure(commonTestConfig(project)) additionalTest.configure(config) additionalTest.dependsOn(project.tasks.testClasses) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.groovy deleted file mode 100644 index b1b04a2ded684..0000000000000 --- 
a/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.groovy +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.gradle - -import org.gradle.api.GradleException -import org.gradle.api.tasks.Exec - -/** - * A wrapper around gradle's Exec task to capture output and log on error. - */ -class LoggedExec extends Exec { - - protected ByteArrayOutputStream output = new ByteArrayOutputStream() - - LoggedExec() { - if (logger.isInfoEnabled() == false) { - standardOutput = output - errorOutput = output - ignoreExitValue = true - doLast { - if (execResult.exitValue != 0) { - output.toString('UTF-8').eachLine { line -> logger.error(line) } - throw new GradleException("Process '${executable} ${args.join(' ')}' finished with non-zero exit value ${execResult.exitValue}") - } - } - } - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.java b/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.java new file mode 100644 index 0000000000000..7f51c4fb3987d --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/LoggedExec.java @@ -0,0 +1,41 @@ +package org.elasticsearch.gradle; + +import groovy.lang.Closure; +import org.gradle.api.GradleException; +import org.gradle.api.Task; +import org.gradle.api.tasks.Exec; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.stream.Collectors; + +/** + * A wrapper around gradle's Exec task to capture output and log on error. + */ +public class LoggedExec extends Exec { + + protected ByteArrayOutputStream output = new ByteArrayOutputStream(); + + public LoggedExec() { + if (getLogger().isInfoEnabled() == false) { + setStandardOutput(output); + setErrorOutput(output); + setIgnoreExitValue(true); + doLast(new Closure(this, this) { + public void doCall(Task it) throws IOException { + if (getExecResult().getExitValue() != 0) { + for (String line : output.toString("UTF-8").split("\\R")) { + getLogger().error(line); + } + throw new GradleException( + "Process \'" + getExecutable() + " " + + getArgs().stream().collect(Collectors.joining(" "))+ + "\' finished with non-zero exit value " + + String.valueOf(getExecResult().getExitValue()) + ); + } + } + }); + } + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionProperties.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionProperties.groovy deleted file mode 100644 index 6983d12872f23..0000000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionProperties.groovy +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.gradle - -/** - * Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions. - */ -class VersionProperties { - static final Version elasticsearch - static final String lucene - static final Map versions = new HashMap<>() - static { - Properties props = new Properties() - InputStream propsStream = VersionProperties.class.getResourceAsStream('/version.properties') - if (propsStream == null) { - throw new RuntimeException('/version.properties resource missing') - } - props.load(propsStream) - elasticsearch = Version.fromString(props.getProperty('elasticsearch')) - lucene = props.getProperty('lucene') - for (String property : props.stringPropertyNames()) { - versions.put(property, props.getProperty(property)) - } - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionProperties.java b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionProperties.java new file mode 100644 index 0000000000000..9ee597eb25ad8 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionProperties.java @@ -0,0 +1,50 @@ +package org.elasticsearch.gradle; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +/** + * Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions. 
+ */ +public class VersionProperties { + public static Version getElasticsearch() { + return elasticsearch; + } + + public static String getLucene() { + return lucene; + } + + public static Map getVersions() { + return versions; + } + + private static final Version elasticsearch; + private static final String lucene; + private static final Map versions = new HashMap(); + static { + Properties props = getVersionProperties(); + elasticsearch = Version.fromString(props.getProperty("elasticsearch")); + lucene = props.getProperty("lucene"); + for (String property : props.stringPropertyNames()) { + versions.put(property, props.getProperty(property)); + } + } + + private static Properties getVersionProperties() { + Properties props = new Properties(); + InputStream propsStream = VersionProperties.class.getResourceAsStream("/version.properties"); + if (propsStream == null) { + throw new RuntimeException("/version.properties resource missing"); + } + try { + props.load(propsStream); + } catch (IOException e) { + throw new RuntimeException(e); + } + return props; + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy index 3c056a5528b5e..f2105086f2553 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy @@ -237,6 +237,18 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { current.println(" - stash_in_path") current.println(" - stash_path_replace") current.println(" - warnings") + if (test.testEnv != null) { + switch (test.testEnv) { + case 'basic': + case 'gold': + case 'platinum': + current.println(" - xpack") + break; + default: + throw new InvalidUserDataException('Unsupported testEnv: ' + + test.testEnv) + } + } } if (test.skipTest) { if (test.continued) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy index 7132361e16361..ec012633f0893 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy @@ -84,6 +84,7 @@ public class SnippetsTask extends DefaultTask { Snippet snippet = null StringBuilder contents = null List substitutions = null + String testEnv = null Closure emit = { snippet.contents = contents.toString() contents = null @@ -143,10 +144,14 @@ public class SnippetsTask extends DefaultTask { } file.eachLine('UTF-8') { String line, int lineNumber -> Matcher matcher + matcher = line =~ /\[testenv="([^"]+)"\]\s*/ + if (matcher.matches()) { + testEnv = matcher.group(1) + } if (line ==~ /-{4,}\s*/) { // Four dashes looks like a snippet if (snippet == null) { Path path = docs.dir.toPath().relativize(file.toPath()) - snippet = new Snippet(path: path, start: lineNumber) + snippet = new Snippet(path: path, start: lineNumber, testEnv: testEnv) if (lastLanguageLine == lineNumber - 1) { snippet.language = lastLanguage } @@ -297,6 +302,7 @@ public class SnippetsTask extends DefaultTask { int start int end = NOT_FINISHED String contents + String testEnv Boolean console = null boolean test = false @@ -321,6 +327,9 @@ public class SnippetsTask extends DefaultTask { } if (test) { result += '// TEST' + if (testEnv != null) { + result += "[testenv=$testEnv]" + } if (catchPart) { result += "[catch: 
$catchPart]" } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LoggerUsageTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LoggerUsageTask.groovy index 87b73795604ab..ac1e12620af87 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LoggerUsageTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/LoggerUsageTask.groovy @@ -50,11 +50,11 @@ public class LoggerUsageTask extends LoggedExec { List files = [] // But only if the source sets that will make them exist if (project.sourceSets.findByName("main")) { - files.add(project.sourceSets.main.output.classesDir) + files.addAll(project.sourceSets.main.output.classesDirs.getFiles()) dependsOn project.tasks.classes } if (project.sourceSets.findByName("test")) { - files.add(project.sourceSets.test.output.classesDir) + files.addAll(project.sourceSets.test.output.classesDirs.getFiles()) dependsOn project.tasks.testClasses } /* In an extra twist, it isn't good enough that the source set diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy deleted file mode 100644 index 6050d4e278dd6..0000000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.gradle.precommit - -import org.elasticsearch.gradle.LoggedExec -import org.elasticsearch.gradle.VersionProperties -import org.gradle.api.artifacts.Dependency -import org.gradle.api.file.FileCollection -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.InputFiles -import org.gradle.api.tasks.OutputFile -/** - * Runs NamingConventionsCheck on a classpath/directory combo to verify that - * tests are named according to our conventions so they'll be picked up by - * gradle. Read the Javadoc for NamingConventionsCheck to learn more. - */ -public class NamingConventionsTask extends LoggedExec { - /** - * We use a simple "marker" file that we touch when the task succeeds - * as the task output. This is compared against the modified time of the - * inputs (ie the jars/class files). - */ - @OutputFile - File successMarker = new File(project.buildDir, "markers/${this.name}") - - /** - * Should we skip the integ tests in disguise tests? Defaults to true because only core names its - * integ tests correctly. - */ - @Input - boolean skipIntegTestInDisguise = false - - /** - * Superclass for all tests. - */ - @Input - String testClass = 'org.apache.lucene.util.LuceneTestCase' - - /** - * Superclass for all integration tests. 
- */ - @Input - String integTestClass = 'org.elasticsearch.test.ESIntegTestCase' - - /** - * Should the test also check the main classpath for test classes instead of - * doing the usual checks to the test classpath. - */ - @Input - boolean checkForTestsInMain = false; - - public NamingConventionsTask() { - // Extra classpath contains the actual test - if (false == project.configurations.names.contains('namingConventions')) { - project.configurations.create('namingConventions') - Dependency buildToolsDep = project.dependencies.add('namingConventions', - "org.elasticsearch.gradle:build-tools:${VersionProperties.elasticsearch}") - buildToolsDep.transitive = false // We don't need gradle in the classpath. It conflicts. - } - FileCollection classpath = project.files(project.configurations.namingConventions, - project.sourceSets.test.compileClasspath, - project.sourceSets.test.output) - dependsOn(classpath) - inputs.files(classpath) - description = "Tests that test classes aren't misnamed or misplaced" - executable = new File(project.runtimeJavaHome, 'bin/java') - if (false == checkForTestsInMain) { - /* This task is created by default for all subprojects with this - * setting and there is no point in running it if the files don't - * exist. */ - onlyIf { project.sourceSets.test.output.classesDir.exists() } - } - - /* - * We build the arguments in a funny afterEvaluate/doFirst closure so that we can wait for the classpath to be - * ready for us. Strangely neither one on their own are good enough. - */ - project.afterEvaluate { - doFirst { - args('-Djna.nosys=true') - args('-cp', classpath.asPath, 'org.elasticsearch.test.NamingConventionsCheck') - args('--test-class', testClass) - if (skipIntegTestInDisguise) { - args('--skip-integ-tests-in-disguise') - } else { - args('--integ-test-class', integTestClass) - } - /* - * The test framework has classes that fail the checks to validate that the checks fail properly. - * Since these would cause the build to fail we have to ignore them with this parameter. The - * process of ignoring them lets us validate that they were found so this ignore parameter acts - * as the test for the NamingConventionsCheck. 
- */ - if (':build-tools'.equals(project.path)) { - args('--self-test') - } - if (checkForTestsInMain) { - args('--main') - args('--') - args(project.sourceSets.main.output.classesDir.absolutePath) - } else { - args('--') - args(project.sourceSets.test.output.classesDir.absolutePath) - } - } - } - doLast { successMarker.setText("", 'UTF-8') } - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.java b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.java new file mode 100644 index 0000000000000..7b63899de31ee --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.java @@ -0,0 +1,185 @@ +package org.elasticsearch.gradle.precommit; + +import groovy.lang.Closure; +import org.codehaus.groovy.runtime.ResourceGroovyMethods; +import org.elasticsearch.gradle.LoggedExec; +import org.elasticsearch.test.NamingConventionsCheck; +import org.gradle.api.GradleException; +import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.file.FileCollection; +import org.gradle.api.plugins.ExtraPropertiesExtension; +import org.gradle.api.plugins.JavaPluginConvention; +import org.gradle.api.tasks.AbstractExecTask; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.OutputFile; +import org.gradle.api.tasks.SourceSetContainer; + +import java.io.File; +import java.io.IOException; +import java.util.Objects; + +/** + * Runs NamingConventionsCheck on a classpath/directory combo to verify that + * tests are named according to our conventions so they'll be picked up by + * gradle. Read the Javadoc for NamingConventionsCheck to learn more. + */ +public class NamingConventionsTask extends LoggedExec { + public NamingConventionsTask() { + setDescription("Tests that test classes aren't misnamed or misplaced"); + final Project project = getProject(); + + SourceSetContainer sourceSets = getJavaSourceSets(); + final FileCollection classpath = project.files( + // This works because the class only depends on one class from junit that will be available from the + // tests compile classpath. It's the most straight forward way of telling Java where to find the main + // class. + NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation().getPath(), + // the tests to be loaded + checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : project.files(), + sourceSets.getByName("test").getCompileClasspath(), + sourceSets.getByName("test").getOutput() + ); + dependsOn(project.getTasks().matching(it -> "testCompileClasspath".equals(it.getName()))); + getInputs().files(classpath); + + setExecutable(new File( + Objects.requireNonNull( + project.getExtensions().getByType(ExtraPropertiesExtension.class).get("runtimeJavaHome") + ).toString(), + "bin/java") + ); + + if (checkForTestsInMain == false) { + /* This task is created by default for all subprojects with this + * setting and there is no point in running it if the files don't + * exist. */ + onlyIf((unused) -> getExistingClassesDirs().isEmpty() == false); + } + + /* + * We build the arguments in a funny afterEvaluate/doFirst closure so that we can wait for the classpath to be + * ready for us. Strangely neither one on their own are good enough. 
+ */ + project.afterEvaluate(new Closure(this, this) { + public Task doCall(Project it) { + return doFirst(new Closure(NamingConventionsTask.this, NamingConventionsTask.this) { + public AbstractExecTask doCall(Task it) { + args("-Djna.nosys=true"); + args("-cp", classpath.getAsPath(), "org.elasticsearch.test.NamingConventionsCheck"); + args("--test-class", getTestClass()); + if (skipIntegTestInDisguise) { + args("--skip-integ-tests-in-disguise"); + } else { + args("--integ-test-class", getIntegTestClass()); + } + if (getCheckForTestsInMain()) { + args("--main"); + args("--"); + } else { + args("--"); + } + return args(getExistingClassesDirs().getAsPath()); + } + }); + } + }); + doLast(new Closure(this, this) { + public void doCall(Task it) { + try { + ResourceGroovyMethods.setText(getSuccessMarker(), "", "UTF-8"); + } catch (IOException e) { + throw new GradleException("io exception", e); + } + } + }); + } + + private SourceSetContainer getJavaSourceSets() { + return getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets(); + } + + public FileCollection getExistingClassesDirs() { + FileCollection classesDirs = getJavaSourceSets().getByName(checkForTestsInMain ? "main" : "test") + .getOutput().getClassesDirs(); + return classesDirs.filter(it -> it.exists()); + } + + public File getSuccessMarker() { + return successMarker; + } + + public void setSuccessMarker(File successMarker) { + this.successMarker = successMarker; + } + + public boolean getSkipIntegTestInDisguise() { + return skipIntegTestInDisguise; + } + + public boolean isSkipIntegTestInDisguise() { + return skipIntegTestInDisguise; + } + + public void setSkipIntegTestInDisguise(boolean skipIntegTestInDisguise) { + this.skipIntegTestInDisguise = skipIntegTestInDisguise; + } + + public String getTestClass() { + return testClass; + } + + public void setTestClass(String testClass) { + this.testClass = testClass; + } + + public String getIntegTestClass() { + return integTestClass; + } + + public void setIntegTestClass(String integTestClass) { + this.integTestClass = integTestClass; + } + + public boolean getCheckForTestsInMain() { + return checkForTestsInMain; + } + + public boolean isCheckForTestsInMain() { + return checkForTestsInMain; + } + + public void setCheckForTestsInMain(boolean checkForTestsInMain) { + this.checkForTestsInMain = checkForTestsInMain; + } + + /** + * We use a simple "marker" file that we touch when the task succeeds + * as the task output. This is compared against the modified time of the + * inputs (ie the jars/class files). + */ + @OutputFile + private File successMarker = new File(getProject().getBuildDir(), "markers/" + this.getName()); + /** + * Should we skip the integ tests in disguise tests? Defaults to true because only core names its + * integ tests correctly. + */ + @Input + private boolean skipIntegTestInDisguise = false; + /** + * Superclass for all tests. + */ + @Input + private String testClass = "org.apache.lucene.util.LuceneTestCase"; + /** + * Superclass for all integration tests. + */ + @Input + private String integTestClass = "org.elasticsearch.test.ESIntegTestCase"; + /** + * Should the test also check the main classpath for test classes instead of + * doing the usual checks to the test classpath. 
+ */ + @Input + private boolean checkForTestsInMain = false; +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 2ac0e22a82bc5..f2e6dc8e56186 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -61,7 +61,7 @@ public class RestIntegTestTask extends DefaultTask { clusterInit = project.tasks.create(name: "${name}Cluster#init", dependsOn: project.testClasses) runner.dependsOn(clusterInit) runner.classpath = project.sourceSets.test.runtimeClasspath - runner.testClassesDir = project.sourceSets.test.output.classesDir + runner.testClassesDirs = project.sourceSets.test.output.classesDirs clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project) // start with the common test configuration diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy index 5eec829dfa1ba..3e1f62f96e6bd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy @@ -47,7 +47,7 @@ public class StandaloneTestPlugin implements Plugin { test.configure(BuildPlugin.commonTestConfig(project)) BuildPlugin.configureCompile(project) test.classpath = project.sourceSets.test.runtimeClasspath - test.testClassesDir project.sourceSets.test.output.classesDir + test.testClassesDirs = project.sourceSets.test.output.classesDirs test.mustRunAfter(project.precommit) project.check.dependsOn(test) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy index aab120e8d049a..161584938bde8 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy @@ -22,14 +22,9 @@ import org.apache.commons.io.output.TeeOutputStream import org.elasticsearch.gradle.LoggedExec import org.gradle.api.tasks.Input import org.gradle.api.tasks.Optional -import org.gradle.api.tasks.TaskAction import org.gradle.internal.logging.progress.ProgressLoggerFactory import javax.inject.Inject -import java.util.concurrent.CountDownLatch -import java.util.concurrent.locks.Lock -import java.util.concurrent.locks.ReadWriteLock -import java.util.concurrent.locks.ReentrantLock /** * Runs a vagrant command. Pretty much like Exec task but with a nicer output diff --git a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java index 9bd14675d34a4..58e95cfc00232 100644 --- a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java +++ b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java @@ -19,6 +19,7 @@ package org.elasticsearch.test; +import java.io.File; import java.io.IOException; import java.lang.reflect.Modifier; import java.nio.file.FileVisitResult; @@ -30,6 +31,7 @@ import java.util.HashSet; import java.util.Objects; import java.util.Set; +import java.util.regex.Pattern; /** * Checks that all tests in a directory are named according to our naming conventions. 
This is important because tests that do not follow @@ -37,19 +39,13 @@ * a class with a main method so gradle can call it for each project. This has the advantage of allowing gradle to calculate when it is * {@code UP-TO-DATE} so it can be skipped if the compiled classes haven't changed. This is useful on large modules for which checking all * the modules can be slow. - * - * Annoyingly, this cannot be tested using standard unit tests because to do so you'd have to declare classes that violate the rules. That - * would cause the test fail which would prevent the build from passing. So we have to make a mechanism for removing those test classes. Now - * that we have such a mechanism it isn't much work to fail the process if we don't detect the offending classes. Thus, the funky - * {@code --self-test} that is only run in the test:framework project. */ public class NamingConventionsCheck { public static void main(String[] args) throws IOException { Class testClass = null; Class integTestClass = null; - Path rootPath = null; + String rootPathList = null; boolean skipIntegTestsInDisguise = false; - boolean selfTest = false; boolean checkMainClasses = false; for (int i = 0; i < args.length; i++) { String arg = args[i]; @@ -63,14 +59,11 @@ public static void main(String[] args) throws IOException { case "--skip-integ-tests-in-disguise": skipIntegTestsInDisguise = true; break; - case "--self-test": - selfTest = true; - break; case "--main": checkMainClasses = true; break; case "--": - rootPath = Paths.get(args[++i]); + rootPathList = args[++i]; break; default: fail("unsupported argument '" + arg + "'"); @@ -78,44 +71,49 @@ public static void main(String[] args) throws IOException { } NamingConventionsCheck check = new NamingConventionsCheck(testClass, integTestClass); - if (checkMainClasses) { - check.checkMain(rootPath); - } else { - check.checkTests(rootPath, skipIntegTestsInDisguise); - } - - if (selfTest) { + for (String rootDir : rootPathList.split(Pattern.quote(File.pathSeparator))) { + Path rootPath = Paths.get(rootDir); if (checkMainClasses) { - assertViolation(NamingConventionsCheckInMainTests.class.getName(), check.testsInMain); - assertViolation(NamingConventionsCheckInMainIT.class.getName(), check.testsInMain); + check.checkMain(rootPath); } else { - assertViolation("WrongName", check.missingSuffix); - assertViolation("WrongNameTheSecond", check.missingSuffix); - assertViolation("DummyAbstractTests", check.notRunnable); - assertViolation("DummyInterfaceTests", check.notRunnable); - assertViolation("InnerTests", check.innerClasses); - assertViolation("NotImplementingTests", check.notImplementing); - assertViolation("PlainUnit", check.pureUnitTest); + check.checkTests(rootPath, skipIntegTestsInDisguise); } } // Now we should have no violations - assertNoViolations( + int exitCode = 0 ; + exitCode += countAndPrintViolations( "Not all subclasses of " + check.testClass.getSimpleName() + " match the naming convention. 
Concrete classes must end with [Tests]", - check.missingSuffix); - assertNoViolations("Classes ending with [Tests] are abstract or interfaces", check.notRunnable); - assertNoViolations("Found inner classes that are tests, which are excluded from the test runner", check.innerClasses); - assertNoViolations("Pure Unit-Test found must subclass [" + check.testClass.getSimpleName() + "]", check.pureUnitTest); - assertNoViolations("Classes ending with [Tests] must subclass [" + check.testClass.getSimpleName() + "]", check.notImplementing); - assertNoViolations( - "Classes ending with [Tests] or [IT] or extending [" + check.testClass.getSimpleName() + "] must be in src/test/java", - check.testsInMain); + check.missingSuffix) ; + exitCode += countAndPrintViolations( + "Classes ending with [Tests] are abstract or interfaces", + check.notRunnable + ); + exitCode += countAndPrintViolations( + "Found inner classes that are tests, which are excluded from the test runner", + check.innerClasses + ); + exitCode += countAndPrintViolations( + "Pure Unit-Test found must subclass [" + check.testClass.getSimpleName() + "]", + check.pureUnitTest + ); + exitCode += countAndPrintViolations( + "Classes ending with [Tests] must subclass [" + check.testClass.getSimpleName() + "]", + check.notImplementing + ); + exitCode += countAndPrintViolations( + "Classes ending with [Tests] or [IT] or extending [" + + check.testClass.getSimpleName() + "] must be in src/test/java", + check.testsInMain + ); if (skipIntegTestsInDisguise == false) { - assertNoViolations( - "Subclasses of " + check.integTestClass.getSimpleName() + " should end with IT as they are integration tests", - check.integTestsInDisguise); + exitCode += countAndPrintViolations("Subclasses of " + check.integTestClass.getSimpleName() + + " should end with IT as they are integration tests", + check.integTestsInDisguise + ); } + System.exit(exitCode); } private final Set> notImplementing = new HashSet<>(); @@ -138,7 +136,9 @@ public void checkTests(Path rootPath, boolean skipTestsInDisguised) throws IOExc Files.walkFileTree(rootPath, new TestClassVisitor() { @Override protected void visitTestClass(Class clazz) { - if (skipTestsInDisguised == false && integTestClass.isAssignableFrom(clazz)) { + if (skipTestsInDisguised == false && + integTestClass.isAssignableFrom(clazz) && + clazz != integTestClass) { integTestsInDisguise.add(clazz); } if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) { @@ -196,18 +196,15 @@ protected void visitOtherClass(Class clazz) { } - /** - * Fail the process if there are any violations in the set. Named to look like a junit assertion even though it isn't because it is - * similar enough. - */ - private static void assertNoViolations(String message, Set> set) { + private static int countAndPrintViolations(String message, Set> set) { if (false == set.isEmpty()) { System.err.println(message + ":"); for (Class bad : set) { System.err.println(" * " + bad.getName()); } - System.exit(1); + return 1; } + return 0; } /** @@ -254,15 +251,16 @@ abstract class TestClassVisitor implements FileVisitor { * Visit classes named like a test. */ protected abstract void visitTestClass(Class clazz); + /** * Visit classes named like an integration test. */ protected abstract void visitIntegrationTestClass(Class clazz); + /** * Visit classes not named like a test at all. 
*/ protected abstract void visitOtherClass(Class clazz); - @Override public final FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { // First we visit the root directory @@ -310,5 +308,7 @@ protected boolean isTestCase(Class clazz) { public final FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { throw exc; } + } + } diff --git a/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTest.groovy b/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTests.groovy similarity index 98% rename from buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTest.groovy rename to buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTests.groovy index 14f6d1b8523f7..2901acf65220a 100644 --- a/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTest.groovy +++ b/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTests.groovy @@ -1,6 +1,9 @@ package org.elasticsearch.gradle -class VersionCollectionTest extends GroovyTestCase { +import org.elasticsearch.gradle.test.GradleUnitTestCase +import org.junit.Test + +class VersionCollectionTests extends GradleUnitTestCase { String formatVersion(String version) { return " public static final Version V_${version.replaceAll("\\.", "_")} " @@ -16,6 +19,7 @@ class VersionCollectionTest extends GroovyTestCase { * branched from Major-1.x At the time of this writing 6.2 is unreleased and 6.3 is the 6.x branch. This test simulates the behavior * from 7.0 perspective, or master at the time of this writing. */ + @Test void testAgainstMajorUnreleasedWithExistingStagedMinorRelease() { VersionCollection vc = new VersionCollection(allVersions) assertNotNull(vc) @@ -51,6 +55,7 @@ class VersionCollectionTest extends GroovyTestCase { * unreleased minor is released. At the time of this writing 6.2 is unreleased, so adding a 6.2.1 simulates a 6.2 release. This test * simulates the behavior from 7.0 perspective, or master at the time of this writing. */ + @Test void testAgainstMajorUnreleasedWithoutStagedMinorRelease() { List localVersion = allVersions.clone() localVersion.add(formatVersion('6.2.1')) // release 6.2 @@ -89,6 +94,7 @@ class VersionCollectionTest extends GroovyTestCase { * branched from Major.x At the time of this writing 6.2 is unreleased and 6.3 is the 6.x branch. This test simulates the behavior * from 6.3 perspective. */ + @Test void testAgainstMinorReleasedBranch() { List localVersion = allVersions.clone() localVersion.removeAll { it.toString().contains('7_0_0')} // remove all the 7.x so that the actual version is 6.3 (6.x) @@ -126,6 +132,7 @@ class VersionCollectionTest extends GroovyTestCase { * unreleased minor is released. At the time of this writing 6.2 is unreleased, so adding a 6.2.1 simulates a 6.2 release. This test * simulates the behavior from 6.3 perspective. */ + @Test void testAgainstMinorReleasedBranchNoStagedMinor() { List localVersion = allVersions.clone() // remove all the 7.x and add a 6.2.1 which means 6.2 was released @@ -162,6 +169,7 @@ class VersionCollectionTest extends GroovyTestCase { * This validates the logic of being on a released minor branch. At the time of writing, 6.2 is unreleased, so this is equivalent of being * on 6.1. */ + @Test void testAgainstOldMinor() { List localVersion = allVersions.clone() @@ -195,6 +203,7 @@ class VersionCollectionTest extends GroovyTestCase { * This validates the lower bound of wire compat, which is 5.0. 
It also validates that the span of 2.x to 5.x if it is decided to port * this fix all the way to the maint 5.6 release. */ + @Test void testFloorOfWireCompatVersions() { List localVersion = [formatVersion('2.0.0'), formatVersion('2.0.1'), formatVersion('2.1.0'), formatVersion('2.1.1'), formatVersion('5.0.0'), formatVersion('5.0.1'), formatVersion('5.1.0'), formatVersion('5.1.1'), diff --git a/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy b/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestFromSnippetsTaskTests.groovy similarity index 74% rename from buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy rename to buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestFromSnippetsTaskTests.groovy index b986319492001..df20f542f9c39 100644 --- a/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTaskTest.groovy +++ b/buildSrc/src/test/groovy/org/elasticsearch/gradle/doc/RestTestFromSnippetsTaskTests.groovy @@ -19,31 +19,41 @@ package org.elasticsearch.gradle.doc -import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.shouldAddShardFailureCheck +import org.elasticsearch.gradle.test.GradleUnitTestCase +import org.gradle.api.InvalidUserDataException +import org.junit.Rule +import org.junit.rules.ExpectedException + import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote +import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.shouldAddShardFailureCheck + +class RestTestFromSnippetsTaskTests extends GradleUnitTestCase { + + @Rule + public ExpectedException expectedEx = ExpectedException.none() -class RestTestFromSnippetsTaskTest extends GroovyTestCase { void testInvalidBlockQuote() { - String input = "\"foo\": \"\"\"bar\""; - String message = shouldFail({ replaceBlockQuote(input) }); - assertEquals("Invalid block quote starting at 7 in:\n$input", message); + String input = "\"foo\": \"\"\"bar\"" + expectedEx.expect(InvalidUserDataException.class) + expectedEx.expectMessage("Invalid block quote starting at 7 in:\n$input") + replaceBlockQuote(input) } void testSimpleBlockQuote() { assertEquals("\"foo\": \"bort baz\"", - replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"")); + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"")) } void testMultipleBlockQuotes() { assertEquals("\"foo\": \"bort baz\", \"bar\": \"other\"", - replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"")); + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"")) } void testEscapingInBlockQuote() { assertEquals("\"foo\": \"bort\\\" baz\"", - replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"")); + replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"")) assertEquals("\"foo\": \"bort\\n baz\"", - replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"")); + replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"")) } void testIsDocWriteRequest() { diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/NamingConventionsTaskIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/NamingConventionsTaskIT.java new file mode 100644 index 0000000000000..7e469e8597ddd --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/NamingConventionsTaskIT.java @@ -0,0 +1,75 @@ +package org.elasticsearch.gradle.precommit; + +import org.elasticsearch.gradle.test.GradleIntegrationTestCase; +import org.gradle.testkit.runner.BuildResult; +import org.gradle.testkit.runner.GradleRunner; 
+import org.gradle.testkit.runner.TaskOutcome; + +import java.util.Arrays; + +public class NamingConventionsTaskIT extends GradleIntegrationTestCase { + + public void testPluginCanBeApplied() { + BuildResult result = GradleRunner.create() + .withProjectDir(getProjectDir("namingConventionsSelfTest")) + .withArguments("hello", "-s", "-PcheckForTestsInMain=false") + .withPluginClasspath() + .build(); + + assertEquals(TaskOutcome.SUCCESS, result.task(":hello").getOutcome()); + String output = result.getOutput(); + assertTrue(output, output.contains("build plugin can be applied")); + } + + public void testNameCheckFailsAsItShould() { + BuildResult result = GradleRunner.create() + .withProjectDir(getProjectDir("namingConventionsSelfTest")) + .withArguments("namingConventions", "-s", "-PcheckForTestsInMain=false") + .withPluginClasspath() + .buildAndFail(); + + assertNotNull("task did not run", result.task(":namingConventions")); + assertEquals(TaskOutcome.FAILED, result.task(":namingConventions").getOutcome()); + String output = result.getOutput(); + for (String line : Arrays.asList( + "Found inner classes that are tests, which are excluded from the test runner:", + "* org.elasticsearch.test.NamingConventionsCheckInMainIT$InternalInvalidTests", + "Classes ending with [Tests] must subclass [UnitTestCase]:", + "* org.elasticsearch.test.NamingConventionsCheckInMainTests", + "* org.elasticsearch.test.NamingConventionsCheckInMainIT", + "Not all subclasses of UnitTestCase match the naming convention. Concrete classes must end with [Tests]:", + "* org.elasticsearch.test.WrongName")) { + assertTrue( + "expected: '" + line + "' but it was not found in the output:\n" + output, + output.contains(line) + ); + } + } + + public void testNameCheckFailsAsItShouldWithMain() { + BuildResult result = GradleRunner.create() + .withProjectDir(getProjectDir("namingConventionsSelfTest")) + .withArguments("namingConventions", "-s", "-PcheckForTestsInMain=true") + .withPluginClasspath() + .buildAndFail(); + + assertNotNull("task did not run", result.task(":namingConventions")); + assertEquals(TaskOutcome.FAILED, result.task(":namingConventions").getOutcome()); + + String output = result.getOutput(); + for (String line : Arrays.asList( + "Classes ending with [Tests] or [IT] or extending [UnitTestCase] must be in src/test/java:", + "* org.elasticsearch.test.NamingConventionsCheckBadClasses$DummyInterfaceTests", + "* org.elasticsearch.test.NamingConventionsCheckBadClasses$DummyAbstractTests", + "* org.elasticsearch.test.NamingConventionsCheckBadClasses$InnerTests", + "* org.elasticsearch.test.NamingConventionsCheckBadClasses$NotImplementingTests", + "* org.elasticsearch.test.NamingConventionsCheckBadClasses$WrongNameTheSecond", + "* org.elasticsearch.test.NamingConventionsCheckBadClasses$WrongName")) { + assertTrue( + "expected: '" + line + "' but it was not found in the output:\n"+output, + output.contains(line) + ); + } + } + +} diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java b/buildSrc/src/test/java/org/elasticsearch/gradle/test/BaseTestCase.java similarity index 50% rename from plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java rename to buildSrc/src/test/java/org/elasticsearch/gradle/test/BaseTestCase.java index 03b06c5b1bd34..48a62f8900fae 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/test/BaseTestCase.java @@ -16,28 
+16,18 @@ * specific language governing permissions and limitations * under the License. */ +package org.elasticsearch.gradle.test; -package org.elasticsearch.repositories.s3; - -import java.io.Closeable; -import java.util.Map; - -interface AwsS3Service extends Closeable { - - /** - * Creates then caches an {@code AmazonS3} client using the current client - * settings. Returns an {@code AmazonS3Reference} wrapper which has to be - * released as soon as it is not needed anymore. - */ - AmazonS3Reference client(String clientName); - - /** - * Updates settings for building clients and clears the client cache. Future - * client requests will use the new settings to lazily build new clients. - * - * @param clientsSettings the new refreshed settings - * @return the old stale settings - */ - Map refreshAndClearCache(Map clientsSettings); +import com.carrotsearch.randomizedtesting.JUnit4MethodProvider; +import com.carrotsearch.randomizedtesting.RandomizedRunner; +import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders; +import org.junit.Assert; +import org.junit.runner.RunWith; +@RunWith(RandomizedRunner.class) +@TestMethodProviders({ + JUnit4MethodProvider.class, + JUnit3MethodProvider.class +}) +public abstract class BaseTestCase extends Assert { } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java new file mode 100644 index 0000000000000..26da663182f7c --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java @@ -0,0 +1,16 @@ +package org.elasticsearch.gradle.test; + +import java.io.File; + +public abstract class GradleIntegrationTestCase extends GradleUnitTestCase { + + protected File getProjectDir(String name) { + File root = new File("src/testKit/"); + if (root.exists() == false) { + throw new RuntimeException("Could not find resources dir for integration tests. " + + "Note that these tests can only be ran by Gradle and are not currently supported by the IDE"); + } + return new File(root, name); + } + +} diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleUnitTestCase.java b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleUnitTestCase.java new file mode 100644 index 0000000000000..b24624c7854b8 --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleUnitTestCase.java @@ -0,0 +1,14 @@ +package org.elasticsearch.gradle.test; + +import com.carrotsearch.randomizedtesting.JUnit4MethodProvider; +import com.carrotsearch.randomizedtesting.RandomizedRunner; +import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders; +import org.junit.runner.RunWith; + +@RunWith(RandomizedRunner.class) +@TestMethodProviders({ + JUnit4MethodProvider.class, + JUnit3MethodProvider.class +}) +public abstract class GradleUnitTestCase extends BaseTestCase { +} diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/JUnit3MethodProvider.java b/buildSrc/src/test/java/org/elasticsearch/gradle/test/JUnit3MethodProvider.java new file mode 100644 index 0000000000000..18871e16555ef --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/test/JUnit3MethodProvider.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.test; + +import com.carrotsearch.randomizedtesting.ClassModel; +import com.carrotsearch.randomizedtesting.ClassModel.MethodModel; +import com.carrotsearch.randomizedtesting.TestMethodProvider; + +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Map; + +/** + * Backwards compatible test* method provider (public, non-static). + * + * copy of org.apache.lucene.util.LuceneJUnit3MethodProvider to avoid a dependency between build and test fw. + */ +public final class JUnit3MethodProvider implements TestMethodProvider { + @Override + public Collection<Method> getTestMethods(Class<?> suiteClass, ClassModel classModel) { + Map<Method, MethodModel> methods = classModel.getMethods(); + ArrayList<Method> result = new ArrayList<>(); + for (MethodModel mm : methods.values()) { + // Skip any methods that have overrides / shadows. + if (mm.getDown() != null) continue; + + Method m = mm.element; + if (m.getName().startsWith("test") && + Modifier.isPublic(m.getModifiers()) && + !Modifier.isStatic(m.getModifiers()) && + m.getParameterTypes().length == 0) { + result.add(m); + } + } + return result; + } +} diff --git a/buildSrc/src/testKit/namingConventionsSelfTest/build.gradle b/buildSrc/src/testKit/namingConventionsSelfTest/build.gradle new file mode 100644 index 0000000000000..47e0e94b86ac2 --- /dev/null +++ b/buildSrc/src/testKit/namingConventionsSelfTest/build.gradle @@ -0,0 +1,30 @@ +plugins { + id 'java' + id 'elasticsearch.build' +} + +dependencyLicenses.enabled = false +dependenciesInfo.enabled = false +forbiddenApisMain.enabled = false +forbiddenApisTest.enabled = false +jarHell.enabled = false +thirdPartyAudit.enabled = false + +ext.licenseFile = file("$buildDir/dummy/license") +ext.noticeFile = file("$buildDir/dummy/notice") + +task hello { + doFirst { + println "build plugin can be applied" + } +} + +dependencies { + compile "junit:junit:${versions.junit}" +} + +namingConventions { + checkForTestsInMain = project.property("checkForTestsInMain") == "true" + testClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$UnitTestCase' + integTestClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$IntegTestCase' +} diff --git a/buildSrc/src/test/java/org/elasticsearch/test/NamingConventionsCheckBadClasses.java b/buildSrc/src/testKit/namingConventionsSelfTest/src/main/java/org/elasticsearch/test/NamingConventionsCheckBadClasses.java similarity index 100% rename from buildSrc/src/test/java/org/elasticsearch/test/NamingConventionsCheckBadClasses.java rename to buildSrc/src/testKit/namingConventionsSelfTest/src/main/java/org/elasticsearch/test/NamingConventionsCheckBadClasses.java diff --git a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java b/buildSrc/src/testKit/namingConventionsSelfTest/src/test/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java similarity
index 89% rename from buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java rename to buildSrc/src/testKit/namingConventionsSelfTest/src/test/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java index 46adc7f065b16..438f80154191b 100644 --- a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java +++ b/buildSrc/src/testKit/namingConventionsSelfTest/src/test/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java @@ -23,4 +23,9 @@ * This class should fail the naming conventions self test. */ public class NamingConventionsCheckInMainIT { + + public static class InternalInvalidTests extends NamingConventionsCheckBadClasses.UnitTestCase { + + } + } diff --git a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java b/buildSrc/src/testKit/namingConventionsSelfTest/src/test/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java similarity index 100% rename from buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java rename to buildSrc/src/testKit/namingConventionsSelfTest/src/test/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java diff --git a/buildSrc/src/testKit/namingConventionsSelfTest/src/test/java/org/elasticsearch/test/WrongName.java b/buildSrc/src/testKit/namingConventionsSelfTest/src/test/java/org/elasticsearch/test/WrongName.java new file mode 100644 index 0000000000000..64d6a237f8f4d --- /dev/null +++ b/buildSrc/src/testKit/namingConventionsSelfTest/src/test/java/org/elasticsearch/test/WrongName.java @@ -0,0 +1,26 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test; + +/** + * This class should fail the naming conventions self test. 
+ */ +public class WrongName extends NamingConventionsCheckBadClasses.UnitTestCase { +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 63a0e0e98377e..839d86bf9f10a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; @@ -65,14 +67,15 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.DeletePipelineRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; @@ -100,6 +103,7 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.tasks.TaskId; @@ -601,6 +605,21 @@ static Request searchTemplate(SearchTemplateRequest searchTemplateRequest) throw request.setEntity(createEntity(searchTemplateRequest, REQUEST_BODY_CONTENT_TYPE)); return request; } + + static Request multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest) throws IOException { + Request request = new Request(HttpPost.METHOD_NAME, "/_msearch/template"); + + Params params = new Params(request); + params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true"); + if (multiSearchTemplateRequest.maxConcurrentSearchRequests() != MultiSearchRequest.MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT) { + params.putParam("max_concurrent_searches", Integer.toString(multiSearchTemplateRequest.maxConcurrentSearchRequests())); + } + + XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent(); + byte[] source = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, xContent); + request.setEntity(new ByteArrayEntity(source, 
createContentType(xContent.type()))); + return request; + } static Request existsAlias(GetAliasesRequest getAliasesRequest) { if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) && @@ -618,6 +637,19 @@ static Request existsAlias(GetAliasesRequest getAliasesRequest) { return request; } + static Request explain(ExplainRequest explainRequest) throws IOException { + Request request = new Request(HttpGet.METHOD_NAME, + endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain")); + + Params params = new Params(request); + params.withStoredFields(explainRequest.storedFields()); + params.withFetchSourceContext(explainRequest.fetchSourceContext()); + params.withRouting(explainRequest.routing()); + params.withPreference(explainRequest.preference()); + request.setEntity(createEntity(explainRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request fieldCaps(FieldCapabilitiesRequest fieldCapabilitiesRequest) { Request request = new Request(HttpGet.METHOD_NAME, endpoint(fieldCapabilitiesRequest.indices(), "_field_caps")); @@ -866,6 +898,39 @@ static Request verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest) return request; } + static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException { + String endpoint = new EndpointBuilder().addPathPart("_snapshot") + .addPathPart(createSnapshotRequest.repository()) + .addPathPart(createSnapshotRequest.snapshot()) + .build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + Params params = new Params(request); + params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout()); + params.withWaitForCompletion(createSnapshotRequest.waitForCompletion()); + request.setEntity(createEntity(createSnapshotRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) { + EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPartAsIs("_snapshot") + .addPathPart(getSnapshotsRequest.repository()); + String endpoint; + if (getSnapshotsRequest.snapshots().length == 0) { + endpoint = endpointBuilder.addPathPart("_all").build(); + } else { + endpoint = endpointBuilder.addCommaSeparatedPathParts(getSnapshotsRequest.snapshots()).build(); + } + + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + Params parameters = new Params(request); + parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout()); + parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable())); + parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose())); + + return request; + } + static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) { String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") .addPathPart(deleteSnapshotRequest.repository()) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 6905cfdb8f714..48277d67e6d15 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -34,6 +34,8 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; +import 
org.elasticsearch.action.explain.ExplainRequest; +import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.get.GetRequest; @@ -66,6 +68,8 @@ import org.elasticsearch.plugins.spi.NamedXContentProvider; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; +import org.elasticsearch.script.mustache.MultiSearchTemplateResponse; import org.elasticsearch.script.mustache.SearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateResponse; import org.elasticsearch.search.aggregations.Aggregation; @@ -614,6 +618,42 @@ public final void searchTemplateAsync(SearchTemplateRequest searchTemplateReques SearchTemplateResponse::fromXContent, listener, emptySet()); } + /** + * Executes a request using the Explain API. + * See Explain API on elastic.co + * @param explainRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final ExplainResponse explain(ExplainRequest explainRequest, RequestOptions options) throws IOException { + return performRequest(explainRequest, RequestConverters::explain, options, + response -> { + CheckedFunction entityParser = + parser -> ExplainResponse.fromXContent(parser, convertExistsResponse(response)); + return parseEntity(response.getEntity(), entityParser); + }, + singleton(404)); + } + + /** + * Asynchronously executes a request using the Explain API. + * + * See Explain API on elastic.co + * @param explainRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void explainAsync(ExplainRequest explainRequest, RequestOptions options, ActionListener listener) { + performRequestAsync(explainRequest, RequestConverters::explain, options, + response -> { + CheckedFunction entityParser = + parser -> ExplainResponse.fromXContent(parser, convertExistsResponse(response)); + return parseEntity(response.getEntity(), entityParser); + }, + listener, singleton(404)); + } + /** * Executes a request using the Ranking Evaluation API. * See Ranking Evaluation API @@ -628,6 +668,32 @@ public final RankEvalResponse rankEval(RankEvalRequest rankEvalRequest, RequestO emptySet()); } + + /** + * Executes a request using the Multi Search Template API. + * + * See Multi Search Template API + * on elastic.co. + */ + public final MultiSearchTemplateResponse multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest, + RequestOptions options) throws IOException { + return performRequestAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate, + options, MultiSearchTemplateResponse::fromXContext, emptySet()); + } + + /** + * Asynchronously executes a request using the Multi Search Template API + * + * See Multi Search Template API + * on elastic.co. 
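For orientation while reading the client additions above, a minimal sketch of how a caller might drive the new multiSearchTemplate entry point; the "posts" index, the template parameters, and the MultiSearchTemplateSketch class are illustrative placeholders rather than part of this change:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.MultiSearchTemplateResponse;
import org.elasticsearch.script.mustache.SearchTemplateRequest;

public class MultiSearchTemplateSketch {

    // Builds one templated search per term and sends them all in a single round trip.
    static MultiSearchTemplateResponse searchAll(RestHighLevelClient client, String... terms) throws IOException {
        MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();
        for (String term : terms) {
            SearchTemplateRequest request = new SearchTemplateRequest();
            request.setRequest(new SearchRequest("posts"));
            request.setScriptType(ScriptType.INLINE);
            request.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } } }");
            Map<String, Object> params = new HashMap<>();
            params.put("field", "title");
            params.put("value", term);
            request.setScriptParams(params);
            multiRequest.add(request);
        }
        // New synchronous entry point; multiSearchTemplateAsync is the non-blocking variant.
        return client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT);
    }
}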
+ */ + public final void multiSearchTemplateAsync(MultiSearchTemplateRequest multiSearchTemplateRequest, + RequestOptions options, + ActionListener listener) { + performRequestAsyncAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate, + options, MultiSearchTemplateResponse::fromXContext, listener, emptySet()); + } + /** * Asynchronously executes a request using the Ranking Evaluation API. * See Ranking Evaluation API diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index 36b4f473ce82f..fa147a338de0a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -28,8 +28,12 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import java.io.IOException; @@ -164,6 +168,59 @@ public void verifyRepositoryAsync(VerifyRepositoryRequest verifyRepositoryReques VerifyRepositoryResponse::fromXContent, listener, emptySet()); } + /** + * Creates a snapshot. + *

+ * See Snapshot and Restore + * API on elastic.co + */ + public CreateSnapshotResponse createSnapshot(CreateSnapshotRequest createSnapshotRequest, RequestOptions options) + throws IOException { + return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options, + CreateSnapshotResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously creates a snapshot. + *

+ * See Snapshot and Restore + * API on elastic.co + */ + public void createSnapshotAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options, + CreateSnapshotResponse::fromXContent, listener, emptySet()); + } + + /** + * Get snapshots. + * See Snapshot and Restore + * API on elastic.co + * + * @param getSnapshotsRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public GetSnapshotsResponse get(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options, + GetSnapshotsResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously get snapshots. + * See Snapshot and Restore + * API on elastic.co + * + * @param getSnapshotsRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void getAsync(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options, + GetSnapshotsResponse::fromXContent, listener, emptySet()); + } + /** * Deletes a snapshot. * See Snapshot and Restore diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index b8714967b412a..e838989a0c853 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -37,7 +37,9 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.Alias; @@ -68,6 +70,7 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.MultiGetRequest; @@ -111,6 +114,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.index.VersionType; +import 
org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.rankeval.PrecisionAtK; import org.elasticsearch.index.rankeval.RankEvalRequest; @@ -120,6 +124,7 @@ import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; @@ -1369,7 +1374,53 @@ public void testRenderSearchTemplate() throws Exception { assertEquals(Collections.emptyMap(), request.getParameters()); assertToXContentBody(searchTemplateRequest, request.getEntity()); } - + + public void testMultiSearchTemplate() throws Exception { + final int numSearchRequests = randomIntBetween(1, 10); + MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest(); + + for (int i = 0; i < numSearchRequests; i++) { + // Create a random request. + String[] indices = randomIndicesNames(0, 5); + SearchRequest searchRequest = new SearchRequest(indices); + + Map expectedParams = new HashMap<>(); + setRandomSearchParams(searchRequest, expectedParams); + + // scroll is not supported in the current msearch or msearchtemplate api, so unset it: + searchRequest.scroll((Scroll) null); + // batched reduce size is currently not set-able on a per-request basis as it is a query string parameter only + searchRequest.setBatchedReduceSize(SearchRequest.DEFAULT_BATCHED_REDUCE_SIZE); + + setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams); + + SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(searchRequest); + + searchTemplateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}"); + searchTemplateRequest.setScriptType(ScriptType.INLINE); + searchTemplateRequest.setProfile(randomBoolean()); + + Map scriptParams = new HashMap<>(); + scriptParams.put("field", "name"); + scriptParams.put("value", randomAlphaOfLengthBetween(2, 5)); + searchTemplateRequest.setScriptParams(scriptParams); + + multiSearchTemplateRequest.add(searchTemplateRequest); + } + + Request multiRequest = RequestConverters.multiSearchTemplate(multiSearchTemplateRequest); + + assertEquals(HttpPost.METHOD_NAME, multiRequest.getMethod()); + assertEquals("/_msearch/template", multiRequest.getEndpoint()); + List searchRequests = multiSearchTemplateRequest.requests(); + assertEquals(numSearchRequests, searchRequests.size()); + + HttpEntity actualEntity = multiRequest.getEntity(); + byte[] expectedBytes = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, XContentType.JSON.xContent()); + assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue()); + assertEquals(new BytesArray(expectedBytes), new BytesArray(EntityUtils.toByteArray(actualEntity))); + } + public void testExistsAlias() { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(); String[] indices = randomBoolean() ? 
null : randomIndicesNames(0, 5); @@ -1418,6 +1469,49 @@ public void testExistsAliasNoAliasNoIndex() { } } + public void testExplain() throws IOException { + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); + + ExplainRequest explainRequest = new ExplainRequest(index, type, id); + explainRequest.query(QueryBuilders.termQuery(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10))); + + Map expectedParams = new HashMap<>(); + + if (randomBoolean()) { + String routing = randomAlphaOfLengthBetween(3, 10); + explainRequest.routing(routing); + expectedParams.put("routing", routing); + } + if (randomBoolean()) { + String preference = randomAlphaOfLengthBetween(3, 10); + explainRequest.preference(preference); + expectedParams.put("preference", preference); + } + if (randomBoolean()) { + String[] storedFields = generateRandomStringArray(10, 5, false, false); + String storedFieldsParams = randomFields(storedFields); + explainRequest.storedFields(storedFields); + expectedParams.put("stored_fields", storedFieldsParams); + } + if (randomBoolean()) { + randomizeFetchSourceContextParams(explainRequest::fetchSourceContext, expectedParams); + } + + Request request = RequestConverters.explain(explainRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + endpoint.add(index) + .add(type) + .add(id) + .add("_explain"); + + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals(endpoint.toString(), request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertToXContentBody(explainRequest, request.getEntity()); + } + public void testFieldCaps() { // Create a random request. String[] indices = randomIndicesNames(0, 5); @@ -1943,6 +2037,80 @@ public void testVerifyRepository() { assertThat(expectedParams, equalTo(request.getParameters())); } + public void testCreateSnapshot() throws IOException { + Map expectedParams = new HashMap<>(); + String repository = randomIndicesNames(1, 1)[0]; + String snapshot = "snapshot-" + generateRandomStringArray(1, randomInt(10), false, false)[0]; + String endpoint = "/_snapshot/" + repository + "/" + snapshot; + + CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot); + setRandomMasterTimeout(createSnapshotRequest, expectedParams); + Boolean waitForCompletion = randomBoolean(); + createSnapshotRequest.waitForCompletion(waitForCompletion); + + if (waitForCompletion) { + expectedParams.put("wait_for_completion", waitForCompletion.toString()); + } + + Request request = RequestConverters.createSnapshot(createSnapshotRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertToXContentBody(createSnapshotRequest, request.getEntity()); + } + + public void testGetSnapshots() { + Map expectedParams = new HashMap<>(); + String repository = randomIndicesNames(1, 1)[0]; + String snapshot1 = "snapshot1-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); + String snapshot2 = "snapshot2-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); + + String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s,%s", repository, snapshot1, snapshot2); + + GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(); + getSnapshotsRequest.repository(repository); + getSnapshotsRequest.snapshots(Arrays.asList(snapshot1, 
snapshot2).toArray(new String[0])); + setRandomMasterTimeout(getSnapshotsRequest, expectedParams); + + boolean ignoreUnavailable = randomBoolean(); + getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable); + expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable)); + + boolean verbose = randomBoolean(); + getSnapshotsRequest.verbose(verbose); + expectedParams.put("verbose", Boolean.toString(verbose)); + + Request request = RequestConverters.getSnapshots(getSnapshotsRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertNull(request.getEntity()); + } + + public void testGetAllSnapshots() { + Map expectedParams = new HashMap<>(); + String repository = randomIndicesNames(1, 1)[0]; + + String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/_all", repository); + + GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(repository); + setRandomMasterTimeout(getSnapshotsRequest, expectedParams); + + boolean ignoreUnavailable = randomBoolean(); + getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable); + expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable)); + + boolean verbose = randomBoolean(); + getSnapshotsRequest.verbose(verbose); + expectedParams.put("verbose", Boolean.toString(verbose)); + + Request request = RequestConverters.getSnapshots(getSnapshotsRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertNull(request.getEntity()); + } + public void testDeleteSnapshot() { Map expectedParams = new HashMap<>(); String repository = randomIndicesNames(1, 1)[0]; @@ -2264,7 +2432,7 @@ private static void setRandomSearchParams(SearchRequest searchRequest, expectedParams.put("preference", searchRequest.preference()); } if (randomBoolean()) { - searchRequest.searchType(randomFrom(SearchType.values())); + searchRequest.searchType(randomFrom(SearchType.CURRENTLY_SUPPORTED)); } expectedParams.put("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT)); if (randomBoolean()) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index a87aec7c2cf87..18a43ffa8d404 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -27,6 +27,8 @@ import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.explain.ExplainRequest; +import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; @@ -44,6 +46,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.ScriptQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.join.aggregations.Children; @@ -51,6 +54,9 @@ 
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; +import org.elasticsearch.script.mustache.MultiSearchTemplateResponse; +import org.elasticsearch.script.mustache.MultiSearchTemplateResponse.Item; import org.elasticsearch.script.mustache.SearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateResponse; import org.elasticsearch.search.SearchHit; @@ -63,6 +69,7 @@ import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.Suggest; @@ -135,7 +142,44 @@ public void indexDocuments() throws IOException { client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/5", Collections.emptyMap(), doc); doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/6", Collections.emptyMap(), doc); - client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3/_refresh"); + + mappings = new StringEntity( + "{" + + " \"mappings\": {" + + " \"doc\": {" + + " \"properties\": {" + + " \"field1\": {" + + " \"type\": \"keyword\"," + + " \"store\": true" + + " }," + + " \"field2\": {" + + " \"type\": \"keyword\"," + + " \"store\": true" + + " }" + + " }" + + " }" + + " }" + + "}}", + ContentType.APPLICATION_JSON); + client().performRequest(HttpPut.METHOD_NAME, "/index4", Collections.emptyMap(), mappings); + doc = new StringEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}", ContentType.APPLICATION_JSON); + client().performRequest(HttpPut.METHOD_NAME, "/index4/doc/1", Collections.emptyMap(), doc); + StringEntity aliasFilter = new StringEntity( + "{" + + " \"actions\" : [" + + " {" + + " \"add\" : {" + + " \"index\" : \"index4\"," + + " \"alias\" : \"alias4\"," + + " \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" + + " }" + + " }" + + " ]" + + "}", + ContentType.APPLICATION_JSON); + client().performRequest(HttpPost.METHOD_NAME, "/_aliases", Collections.emptyMap(), aliasFilter); + + client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3,index4/_refresh"); } public void testSearchNoQuery() throws IOException { @@ -834,6 +878,273 @@ public void testRenderSearchTemplate() throws IOException { assertToXContentEquivalent(expectedSource, actualSource, XContentType.JSON); } + + + public void testMultiSearchTemplate() throws Exception { + MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest(); + + SearchTemplateRequest goodRequest = new SearchTemplateRequest(); + goodRequest.setRequest(new SearchRequest("index")); + goodRequest.setScriptType(ScriptType.INLINE); + goodRequest.setScript( + "{" + + " \"query\": {" + + " \"match\": {" + + " \"num\": {{number}}" + + " }" + + " }" + + "}"); + Map scriptParams = new HashMap<>(); + scriptParams.put("number", 10); + goodRequest.setScriptParams(scriptParams); + goodRequest.setExplain(true); + goodRequest.setProfile(true); + multiSearchTemplateRequest.add(goodRequest); + + + SearchTemplateRequest badRequest = new SearchTemplateRequest(); + badRequest.setRequest(new SearchRequest("index")); + 
badRequest.setScriptType(ScriptType.INLINE); + badRequest.setScript("{ NOT VALID JSON {{number}} }"); + scriptParams = new HashMap<>(); + scriptParams.put("number", 10); + badRequest.setScriptParams(scriptParams); + + multiSearchTemplateRequest.add(badRequest); + + MultiSearchTemplateResponse multiSearchTemplateResponse = + execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, + highLevelClient()::multiSearchTemplateAsync); + + Item[] responses = multiSearchTemplateResponse.getResponses(); + + assertEquals(2, responses.length); + + + assertNull(responses[0].getResponse().getSource()); + SearchResponse goodResponse =responses[0].getResponse().getResponse(); + assertNotNull(goodResponse); + assertThat(responses[0].isFailure(), Matchers.is(false)); + assertEquals(1, goodResponse.getHits().totalHits); + assertEquals(1, goodResponse.getHits().getHits().length); + assertThat(goodResponse.getHits().getMaxScore(), greaterThan(0f)); + SearchHit hit = goodResponse.getHits().getHits()[0]; + assertNotNull(hit.getExplanation()); + assertFalse(goodResponse.getProfileResults().isEmpty()); + + + assertNull(responses[0].getResponse().getSource()); + assertThat(responses[1].isFailure(), Matchers.is(true)); + assertNotNull(responses[1].getFailureMessage()); + assertThat(responses[1].getFailureMessage(), containsString("json_parse_exception")); + } + + public void testMultiSearchTemplateAllBad() throws Exception { + MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest(); + + SearchTemplateRequest badRequest1 = new SearchTemplateRequest(); + badRequest1.setRequest(new SearchRequest("index")); + badRequest1.setScriptType(ScriptType.INLINE); + badRequest1.setScript( + "{" + + " \"query\": {" + + " \"match\": {" + + " \"num\": {{number}}" + + " }" + + " }" + + "}"); + Map scriptParams = new HashMap<>(); + scriptParams.put("number", "BAD NUMBER"); + badRequest1.setScriptParams(scriptParams); + multiSearchTemplateRequest.add(badRequest1); + + + SearchTemplateRequest badRequest2 = new SearchTemplateRequest(); + badRequest2.setRequest(new SearchRequest("index")); + badRequest2.setScriptType(ScriptType.INLINE); + badRequest2.setScript("BAD QUERY TEMPLATE"); + scriptParams = new HashMap<>(); + scriptParams.put("number", "BAD NUMBER"); + badRequest2.setScriptParams(scriptParams); + + multiSearchTemplateRequest.add(badRequest2); + + // The whole HTTP request should fail if no nested search requests are valid + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, + () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, + highLevelClient()::multiSearchTemplateAsync)); + + assertEquals(RestStatus.BAD_REQUEST, exception.status()); + assertThat(exception.getMessage(), containsString("no requests added")); + } + + public void testExplain() throws IOException { + { + ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertThat(explainResponse.getIndex(), equalTo("index1")); + assertThat(explainResponse.getType(), equalTo("doc")); + assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f)); + 
assertNull(explainResponse.getGetResult()); + } + { + ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1"); + explainRequest.query(QueryBuilders.termQuery("field", "value1")); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertThat(explainResponse.getIndex(), equalTo("index1")); + assertThat(explainResponse.getType(), equalTo("doc")); + assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), greaterThan(0.0f)); + assertNull(explainResponse.getGetResult()); + } + { + ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1"); + explainRequest.query(QueryBuilders.termQuery("field", "value2")); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertThat(explainResponse.getIndex(), equalTo("index1")); + assertThat(explainResponse.getType(), equalTo("doc")); + assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); + assertTrue(explainResponse.isExists()); + assertFalse(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertNull(explainResponse.getGetResult()); + } + { + ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1"); + explainRequest.query(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("field", "value1")) + .must(QueryBuilders.termQuery("field", "value2"))); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertThat(explainResponse.getIndex(), equalTo("index1")); + assertThat(explainResponse.getType(), equalTo("doc")); + assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); + assertTrue(explainResponse.isExists()); + assertFalse(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getDetails().length, equalTo(2)); + assertNull(explainResponse.getGetResult()); + } + } + + public void testExplainNonExistent() throws IOException { + { + ExplainRequest explainRequest = new ExplainRequest("non_existent_index", "doc", "1"); + explainRequest.query(QueryBuilders.matchQuery("field", "value")); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync)); + assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND)); + assertThat(exception.getIndex().getName(), equalTo("non_existent_index")); + assertThat(exception.getDetailedMessage(), + containsString("Elasticsearch exception [type=index_not_found_exception, reason=no such index]")); + } + { + ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "999"); + explainRequest.query(QueryBuilders.matchQuery("field", "value1")); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertThat(explainResponse.getIndex(), equalTo("index1")); + assertThat(explainResponse.getType(), equalTo("doc")); + assertThat(explainResponse.getId(), equalTo("999")); + assertFalse(explainResponse.isExists()); + assertFalse(explainResponse.isMatch()); + assertFalse(explainResponse.hasExplanation()); + assertNull(explainResponse.getGetResult()); + } + } + + 
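The explain tests above and below all follow the same call shape; purely as an illustration (not part of the patch), a sketch of how a caller might use the new explain method, with the index, type, id and field values as placeholders:

import java.io.IOException;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;

public class ExplainSketch {

    // Asks the cluster why (or why not) one document matches a query.
    static void explainOneDocument(RestHighLevelClient client) throws IOException {
        ExplainRequest request = new ExplainRequest("index1", "doc", "1");
        request.query(QueryBuilders.termQuery("field", "value1"));

        ExplainResponse response = client.explain(request, RequestOptions.DEFAULT);
        if (response.isExists() && response.isMatch()) {
            Explanation explanation = response.getExplanation(); // Lucene scoring breakdown
            System.out.println(explanation);
        }
    }
}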
public void testExplainWithStoredFields() throws IOException { + { + ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + explainRequest.storedFields(new String[]{"field1"}); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f)); + assertTrue(explainResponse.getGetResult().isExists()); + assertThat(explainResponse.getGetResult().getFields().keySet(), equalTo(Collections.singleton("field1"))); + assertThat(explainResponse.getGetResult().getFields().get("field1").getValue().toString(), equalTo("value1")); + assertTrue(explainResponse.getGetResult().isSourceEmpty()); + } + { + ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + explainRequest.storedFields(new String[]{"field1", "field2"}); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f)); + assertTrue(explainResponse.getGetResult().isExists()); + assertThat(explainResponse.getGetResult().getFields().keySet().size(), equalTo(2)); + assertThat(explainResponse.getGetResult().getFields().get("field1").getValue().toString(), equalTo("value1")); + assertThat(explainResponse.getGetResult().getFields().get("field2").getValue().toString(), equalTo("value2")); + assertTrue(explainResponse.getGetResult().isSourceEmpty()); + } + } + + public void testExplainWithFetchSource() throws IOException { + { + ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + explainRequest.fetchSourceContext(new FetchSourceContext(true, new String[]{"field1"}, null)); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f)); + assertTrue(explainResponse.getGetResult().isExists()); + assertThat(explainResponse.getGetResult().getSource(), equalTo(Collections.singletonMap("field1", "value1"))); + } + { + ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + explainRequest.fetchSourceContext(new FetchSourceContext(true, null, new String[] {"field2"})); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f)); + assertTrue(explainResponse.getGetResult().isExists()); + assertThat(explainResponse.getGetResult().getSource(), equalTo(Collections.singletonMap("field1", "value1"))); + } + } + + public void testExplainWithAliasFilter() throws IOException { + ExplainRequest explainRequest = new 
ExplainRequest("alias4", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertTrue(explainResponse.isExists()); + assertFalse(explainResponse.isMatch()); + } public void testFieldCaps() throws IOException { FieldCapabilitiesRequest request = new FieldCapabilitiesRequest() diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index f4d325e158bc5..7ec2ee80f04ac 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -28,15 +28,20 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.Locale; +import java.util.stream.Collectors; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; public class SnapshotIT extends ESRestHighLevelClientTestCase { @@ -49,12 +54,12 @@ private PutRepositoryResponse createTestRepository(String repository, String typ highLevelClient().snapshot()::createRepositoryAsync); } - private Response createTestSnapshot(String repository, String snapshot) throws IOException { - Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repository, snapshot)); - createSnapshot.addParameter("wait_for_completion", "true"); - return highLevelClient().getLowLevelClient().performRequest(createSnapshot); - } + private CreateSnapshotResponse createTestSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException { + // assumes the repository already exists + return execute(createSnapshotRequest, highLevelClient().snapshot()::createSnapshot, + highLevelClient().snapshot()::createSnapshotAsync); + } public void testCreateRepository() throws IOException { PutRepositoryResponse response = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}"); @@ -119,6 +124,55 @@ public void testVerifyRepository() throws IOException { assertThat(response.getNodes().size(), equalTo(1)); } + public void testCreateSnapshot() throws IOException { + String repository = "test_repository"; + assertTrue(createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); + + String snapshot = "test_snapshot"; + CreateSnapshotRequest request = new CreateSnapshotRequest(repository, snapshot); + boolean waitForCompletion = randomBoolean(); + request.waitForCompletion(waitForCompletion); + 
request.partial(randomBoolean()); + request.includeGlobalState(randomBoolean()); + + CreateSnapshotResponse response = createTestSnapshot(request); + assertEquals(waitForCompletion ? RestStatus.OK : RestStatus.ACCEPTED, response.status()); + } + + public void testGetSnapshots() throws IOException { + String repository = "test_repository"; + String snapshot1 = "test_snapshot1"; + String snapshot2 = "test_snapshot2"; + + PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}"); + assertTrue(putRepositoryResponse.isAcknowledged()); + + CreateSnapshotRequest createSnapshotRequest1 = new CreateSnapshotRequest(repository, snapshot1); + createSnapshotRequest1.waitForCompletion(true); + CreateSnapshotResponse putSnapshotResponse1 = createTestSnapshot(createSnapshotRequest1); + CreateSnapshotRequest createSnapshotRequest2 = new CreateSnapshotRequest(repository, snapshot2); + createSnapshotRequest2.waitForCompletion(true); + CreateSnapshotResponse putSnapshotResponse2 = createTestSnapshot(createSnapshotRequest2); + // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. + assertEquals(RestStatus.OK, putSnapshotResponse1.status()); + assertEquals(RestStatus.OK, putSnapshotResponse2.status()); + + GetSnapshotsRequest request; + if (randomBoolean()) { + request = new GetSnapshotsRequest(repository); + } else if (randomBoolean()) { + request = new GetSnapshotsRequest(repository, new String[] {"_all"}); + + } else { + request = new GetSnapshotsRequest(repository, new String[] {snapshot1, snapshot2}); + } + GetSnapshotsResponse response = execute(request, highLevelClient().snapshot()::get, highLevelClient().snapshot()::getAsync); + + assertEquals(2, response.getSnapshots().size()); + assertThat(response.getSnapshots().stream().map((s) -> s.snapshotId().getName()).collect(Collectors.toList()), + contains("test_snapshot1", "test_snapshot2")); + } + public void testDeleteSnapshot() throws IOException { String repository = "test_repository"; String snapshot = "test_snapshot"; @@ -126,9 +180,11 @@ public void testDeleteSnapshot() throws IOException { PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}"); assertTrue(putRepositoryResponse.isAcknowledged()); - Response putSnapshotResponse = createTestSnapshot(repository, snapshot); + CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot); + createSnapshotRequest.waitForCompletion(true); + CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest); // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. 
- assertEquals(200, putSnapshotResponse.getStatusLine().getStatusCode()); + assertEquals(RestStatus.OK, createSnapshotResponse.status()); DeleteSnapshotRequest request = new DeleteSnapshotRequest(repository, snapshot); DeleteSnapshotResponse response = execute(request, highLevelClient().snapshot()::delete, highLevelClient().snapshot()::deleteAsync); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index adc0fede1aa78..308d9ba569931 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -19,12 +19,15 @@ package org.elasticsearch.client.documentation; +import org.apache.lucene.search.Explanation; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.explain.ExplainRequest; +import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; @@ -47,10 +50,12 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -66,6 +71,9 @@ import org.elasticsearch.index.rankeval.RatedSearchHit; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; +import org.elasticsearch.script.mustache.MultiSearchTemplateResponse; +import org.elasticsearch.script.mustache.MultiSearchTemplateResponse.Item; import org.elasticsearch.script.mustache.SearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateResponse; import org.elasticsearch.search.Scroll; @@ -80,6 +88,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.profile.ProfileResult; @@ -767,21 +776,7 @@ public void testSearchTemplateWithStoredScript() throws Exception { RestHighLevelClient client = highLevelClient(); RestClient restClient = client(); - // tag::register-script - Request scriptRequest = new Request("POST", "_scripts/title_search"); - 
scriptRequest.setJsonEntity( - "{" + - " \"script\": {" + - " \"lang\": \"mustache\"," + - " \"source\": {" + - " \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," + - " \"size\" : \"{{size}}\"" + - " }" + - " }" + - "}"); - Response scriptResponse = restClient.performRequest(scriptRequest); - // end::register-script - assertEquals(RestStatus.OK.getStatus(), scriptResponse.getStatusLine().getStatusCode()); + registerQueryScript(restClient); // tag::search-template-request-stored SearchTemplateRequest request = new SearchTemplateRequest(); @@ -834,6 +829,223 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + + public void testMultiSearchTemplateWithInlineScript() throws Exception { + indexSearchTestData(); + RestHighLevelClient client = highLevelClient(); + + // tag::multi-search-template-request-inline + String [] searchTerms = {"elasticsearch", "logstash", "kibana"}; + + MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); // <1> + for (String searchTerm : searchTerms) { + SearchTemplateRequest request = new SearchTemplateRequest(); // <2> + request.setRequest(new SearchRequest("posts")); + + request.setScriptType(ScriptType.INLINE); + request.setScript( + "{" + + " \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," + + " \"size\" : \"{{size}}\"" + + "}"); + + Map scriptParams = new HashMap<>(); + scriptParams.put("field", "title"); + scriptParams.put("value", searchTerm); + scriptParams.put("size", 5); + request.setScriptParams(scriptParams); + + multiRequest.add(request); // <3> + } + // end::multi-search-template-request-inline + + // tag::multi-search-template-request-sync + MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT); + // end::multi-search-template-request-sync + + // tag::multi-search-template-response + for (Item item : multiResponse.getResponses()) { // <1> + if (item.isFailure()) { + String error = item.getFailureMessage(); // <2> + } else { + SearchTemplateResponse searchTemplateResponse = item.getResponse(); // <3> + SearchResponse searchResponse = searchTemplateResponse.getResponse(); + searchResponse.getHits(); + } + } + // end::multi-search-template-response + + assertNotNull(multiResponse); + assertEquals(searchTerms.length, multiResponse.getResponses().length); + assertNotNull(multiResponse.getResponses()[0]); + SearchResponse searchResponse = multiResponse.getResponses()[0].getResponse().getResponse(); + assertTrue(searchResponse.getHits().totalHits > 0); + + } + + public void testMultiSearchTemplateWithStoredScript() throws Exception { + indexSearchTestData(); + RestHighLevelClient client = highLevelClient(); + RestClient restClient = client(); + + registerQueryScript(restClient); + + // tag::multi-search-template-request-stored + MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); + + String [] searchTerms = {"elasticsearch", "logstash", "kibana"}; + for (String searchTerm : searchTerms) { + + SearchTemplateRequest request = new SearchTemplateRequest(); + request.setRequest(new SearchRequest("posts")); + + request.setScriptType(ScriptType.STORED); + request.setScript("title_search"); + + Map params = new HashMap<>(); + params.put("field", "title"); + params.put("value", searchTerm); + params.put("size", 5); + request.setScriptParams(params); + multiRequest.add(request); + } + // end::multi-search-template-request-stored + + + + + // tag::multi-search-template-execute + 
MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT); + // end::multi-search-template-execute + + assertNotNull(multiResponse); + assertEquals(searchTerms.length, multiResponse.getResponses().length); + assertNotNull(multiResponse.getResponses()[0]); + SearchResponse searchResponse = multiResponse.getResponses()[0].getResponse().getResponse(); + assertTrue(searchResponse.getHits().totalHits > 0); + + // tag::multi-search-template-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(MultiSearchTemplateResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::multi-search-template-execute-listener + + // Replace the empty listener by a blocking listener for tests. + CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::multi-search-template-execute-async + client.multiSearchTemplateAsync(multiRequest, RequestOptions.DEFAULT, listener); + // end::multi-search-template-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + protected void registerQueryScript(RestClient restClient) throws IOException { + // tag::register-script + Request scriptRequest = new Request("POST", "_scripts/title_search"); + scriptRequest.setJsonEntity( + "{" + + " \"script\": {" + + " \"lang\": \"mustache\"," + + " \"source\": {" + + " \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," + + " \"size\" : \"{{size}}\"" + + " }" + + " }" + + "}"); + Response scriptResponse = restClient.performRequest(scriptRequest); + // end::register-script + assertEquals(RestStatus.OK.getStatus(), scriptResponse.getStatusLine().getStatusCode()); + } + + + public void testExplain() throws Exception { + indexSearchTestData(); + RestHighLevelClient client = highLevelClient(); + + // tag::explain-request + ExplainRequest request = new ExplainRequest("contributors", "doc", "1"); + request.query(QueryBuilders.termQuery("user", "tanguy")); + // end::explain-request + + // tag::explain-request-routing + request.routing("routing"); // <1> + // end::explain-request-routing + + // tag::explain-request-preference + request.preference("_local"); // <1> + // end::explain-request-preference + + // tag::explain-request-source + request.fetchSourceContext(new FetchSourceContext(true, new String[]{"user"}, null)); // <1> + // end::explain-request-source + + // tag::explain-request-stored-field + request.storedFields(new String[]{"user"}); // <1> + // end::explain-request-stored-field + + // tag::explain-execute + ExplainResponse response = client.explain(request, RequestOptions.DEFAULT); + // end::explain-execute + + // tag::explain-response + String index = response.getIndex(); // <1> + String type = response.getType(); // <2> + String id = response.getId(); // <3> + boolean exists = response.isExists(); // <4> + boolean match = response.isMatch(); // <5> + boolean hasExplanation = response.hasExplanation(); // <6> + Explanation explanation = response.getExplanation(); // <7> + GetResult getResult = response.getGetResult(); // <8> + // end::explain-response + assertThat(index, equalTo("contributors")); + assertThat(type, equalTo("doc")); + assertThat(id, equalTo("1")); + assertTrue(exists); + assertTrue(match); + assertTrue(hasExplanation); + assertNotNull(explanation); + assertNotNull(getResult); + + // tag::get-result + Map source = getResult.getSource(); // <1> + Map fields = 
getResult.getFields(); // <2> + // end::get-result + assertThat(source, equalTo(Collections.singletonMap("user", "tanguy"))); + assertThat(fields.get("user").getValue(), equalTo("tanguy")); + + // tag::explain-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(ExplainResponse explainResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::explain-execute-listener + + CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::explain-execute-async + client.explainAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::explain-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } public void testFieldCaps() throws Exception { indexSearchTestData(); @@ -1046,7 +1258,7 @@ private void indexSearchTestData() throws IOException { assertTrue(authorsResponse.isAcknowledged()); CreateIndexRequest reviewersRequest = new CreateIndexRequest("contributors") - .mapping("doc", "user", "type=keyword"); + .mapping("doc", "user", "type=keyword,store=true"); CreateIndexResponse reviewersResponse = highLevelClient().indices().create(reviewersRequest, RequestOptions.DEFAULT); assertTrue(reviewersResponse.isAcknowledged()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index 965f9641e48ad..2d126fb970cbf 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -29,6 +29,12 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; @@ -41,6 +47,8 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.snapshots.SnapshotInfo; import java.io.IOException; import java.util.HashMap; @@ -367,6 +375,164 @@ public void onFailure(Exception e) { } } + public void testSnapshotCreate() throws IOException { + RestHighLevelClient client = highLevelClient(); + + CreateIndexRequest createIndexRequest = new CreateIndexRequest("test-index0"); + client.indices().create(createIndexRequest, RequestOptions.DEFAULT); + createIndexRequest = new CreateIndexRequest("test-index1"); + 
client.indices().create(createIndexRequest, RequestOptions.DEFAULT); + + createTestRepositories(); + + // tag::create-snapshot-request + CreateSnapshotRequest request = new CreateSnapshotRequest(); + // end::create-snapshot-request + + // tag::create-snapshot-request-repositoryName + request.repository(repositoryName); // <1> + // end::create-snapshot-request-repositoryName + // tag::create-snapshot-request-snapshotName + request.snapshot(snapshotName); // <1> + // end::create-snapshot-request-snapshotName + // tag::create-snapshot-request-indices + request.indices("test-index0", "test-index1"); // <1> + // end::create-snapshot-request-indices + // tag::create-snapshot-request-indicesOptions + request.indicesOptions(IndicesOptions.fromOptions(false, false, true, true)); // <1> + // end::create-snapshot-request-indicesOptions + // tag::create-snapshot-request-partial + request.partial(false); // <1> + // end::create-snapshot-request-partial + // tag::create-snapshot-request-includeGlobalState + request.includeGlobalState(true); // <1> + // end::create-snapshot-request-includeGlobalState + + // tag::create-snapshot-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::create-snapshot-request-masterTimeout + // tag::create-snapshot-request-waitForCompletion + request.waitForCompletion(true); // <1> + // end::create-snapshot-request-waitForCompletion + + // tag::create-snapshot-execute + CreateSnapshotResponse response = client.snapshot().createSnapshot(request, RequestOptions.DEFAULT); + // end::create-snapshot-execute + + // tag::create-snapshot-response + RestStatus status = response.status(); // <1> + // end::create-snapshot-response + + assertEquals(RestStatus.OK, status); + } + + public void testSnapshotCreateAsync() throws InterruptedException { + RestHighLevelClient client = highLevelClient(); + { + CreateSnapshotRequest request = new CreateSnapshotRequest(repositoryName, snapshotName); + + // tag::create-snapshot-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(CreateSnapshotResponse createSnapshotResponse) { + // <1> + } + + @Override + public void onFailure(Exception exception) { + // <2> + } + }; + // end::create-snapshot-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::create-snapshot-execute-async + client.snapshot().createSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::create-snapshot-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + + public void testSnapshotGetSnapshots() throws IOException { + RestHighLevelClient client = highLevelClient(); + + createTestRepositories(); + createTestSnapshots(); + + // tag::get-snapshots-request + GetSnapshotsRequest request = new GetSnapshotsRequest(); + // end::get-snapshots-request + + // tag::get-snapshots-request-repositoryName + request.repository(repositoryName); // <1> + // end::get-snapshots-request-repositoryName + + // tag::get-snapshots-request-snapshots + String[] snapshots = { snapshotName }; + request.snapshots(snapshots); // <1> + // end::get-snapshots-request-snapshots + + // tag::get-snapshots-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::get-snapshots-request-masterTimeout + + // 
tag::get-snapshots-request-verbose + request.verbose(true); // <1> + // end::get-snapshots-request-verbose + + // tag::get-snapshots-request-ignore-unavailable + request.ignoreUnavailable(false); // <1> + // end::get-snapshots-request-ignore-unavailable + + // tag::get-snapshots-execute + GetSnapshotsResponse response = client.snapshot().get(request, RequestOptions.DEFAULT); + // end::get-snapshots-execute + + // tag::get-snapshots-response + List snapshotsInfos = response.getSnapshots(); // <1> + // end::get-snapshots-response + assertEquals(1, snapshotsInfos.size()); + } + + public void testSnapshotGetSnapshotsAsync() throws InterruptedException { + RestHighLevelClient client = highLevelClient(); + { + GetSnapshotsRequest request = new GetSnapshotsRequest(repositoryName); + + // tag::get-snapshots-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetSnapshotsResponse getSnapshotsResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-snapshots-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::get-snapshots-execute-async + client.snapshot().getAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::get-snapshots-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testSnapshotDeleteSnapshot() throws IOException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 77c11db455e47..00b275f701c6d 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -615,16 +615,16 @@ private void setHeaders(HttpRequest httpRequest, Collection

requestHeade */ private NodeTuple> nextNode() throws IOException { NodeTuple> nodeTuple = this.nodeTuple; - List hosts = selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector); + Iterable hosts = selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector); return new NodeTuple<>(hosts.iterator(), nodeTuple.authCache); } /** - * Select hosts to try. Package private for testing. + * Select nodes to try and sorts them so that the first one will be tried initially, then the following ones + * if the previous attempt failed and so on. Package private for testing. */ - static List selectHosts(NodeTuple> nodeTuple, - Map blacklist, AtomicInteger lastNodeIndex, - NodeSelector nodeSelector) throws IOException { + static Iterable selectNodes(NodeTuple> nodeTuple, Map blacklist, + AtomicInteger lastNodeIndex, NodeSelector nodeSelector) throws IOException { /* * Sort the nodes into living and dead lists. */ @@ -653,8 +653,8 @@ static List selectHosts(NodeTuple> nodeTuple, nodeSelector.select(selectedLivingNodes); if (false == selectedLivingNodes.isEmpty()) { /* - * Rotate the list so subsequent requests will prefer the - * nodes in a different order. + * Rotate the list using a global counter as the distance so subsequent + * requests will try the nodes in a different order. */ Collections.rotate(selectedLivingNodes, lastNodeIndex.getAndIncrement()); return selectedLivingNodes; @@ -662,15 +662,13 @@ static List selectHosts(NodeTuple> nodeTuple, } /* - * Last resort: If there are no good nodes to use, either because + * Last resort: there are no good nodes to use, either because * the selector rejected all the living nodes or because there aren't * any living ones. Either way, we want to revive a single dead node - * that the NodeSelectors are OK with. We do this by sorting the dead - * nodes by their revival time and passing them through the - * NodeSelector so it can have its say in which nodes are ok and their - * ordering. If the selector is ok with any of the nodes then use just - * the first one in the list because we only want to revive a single - * node. + * that the NodeSelectors are OK with. We do this by passing the dead + * nodes through the NodeSelector so it can have its say in which nodes + * are ok. If the selector is ok with any of the nodes then we will take + * the one in the list that has the lowest revival time and try it. */ if (false == deadNodes.isEmpty()) { final List selectedDeadNodes = new ArrayList<>(deadNodes); @@ -1010,8 +1008,8 @@ public int compareTo(DeadNode rhs) { } /** - * Adapts an Iterator into an - * Iterator. + * Adapts an Iterator<DeadNodeAndRevival> into an + * Iterator<Node>. */ private static class DeadNodeIteratorAdapter implements Iterator { private final Iterator itr; diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 6b7725666d42d..cb326f4a24c8d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -314,7 +314,7 @@ public void testBody() throws IOException { } /** - * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddHeaders()}. + * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests}. 
*/ @Deprecated public void tesPerformRequestOldStyleNullHeaders() throws IOException { diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java index 030c2fca6272a..271fc51ef8835 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java @@ -21,6 +21,9 @@ import org.apache.http.Header; import org.apache.http.HttpHost; +import org.apache.http.client.AuthCache; +import org.apache.http.impl.auth.BasicScheme; +import org.apache.http.impl.client.BasicAuthCache; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; import org.elasticsearch.client.DeadHostStateTests.ConfigurableTimeSupplier; import org.elasticsearch.client.RestClient.NodeTuple; @@ -35,13 +38,14 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; import static java.util.Collections.singletonList; import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -141,7 +145,7 @@ public void onFailure(Exception exception) { } /** - * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddHeader()}. + * @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests}. */ @Deprecated public void testPerformOldStyleAsyncWithNullHeaders() throws Exception { @@ -407,8 +411,8 @@ public String toString() { * blacklist time. It'll revive the node that is closest * to being revived that the NodeSelector is ok with. */ - assertEquals(singletonList(n1), RestClient.selectHosts(nodeTuple, blacklist, new AtomicInteger(), NodeSelector.ANY)); - assertEquals(singletonList(n2), RestClient.selectHosts(nodeTuple, blacklist, new AtomicInteger(), not1)); + assertEquals(singletonList(n1), RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(), NodeSelector.ANY)); + assertEquals(singletonList(n2), RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(), not1)); /* * Try a NodeSelector that excludes all nodes. This should @@ -449,23 +453,23 @@ private void assertSelectLivingHosts(List expectedNodes, NodeTuple blacklist, NodeSelector nodeSelector) throws IOException { int iterations = 1000; AtomicInteger lastNodeIndex = new AtomicInteger(0); - assertEquals(expectedNodes, RestClient.selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector)); + assertEquals(expectedNodes, RestClient.selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector)); // Calling it again rotates the set of results for (int i = 1; i < iterations; i++) { Collections.rotate(expectedNodes, 1); assertEquals("iteration " + i, expectedNodes, - RestClient.selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector)); + RestClient.selectNodes(nodeTuple, blacklist, lastNodeIndex, nodeSelector)); } } /** - * Assert that {@link RestClient#selectHosts} fails on the provided arguments. + * Assert that {@link RestClient#selectNodes} fails on the provided arguments. 
* @return the message in the exception thrown by the failure */ - private String assertSelectAllRejected( NodeTuple> nodeTuple, + private static String assertSelectAllRejected( NodeTuple> nodeTuple, Map blacklist, NodeSelector nodeSelector) { try { - RestClient.selectHosts(nodeTuple, blacklist, new AtomicInteger(0), nodeSelector); + RestClient.selectNodes(nodeTuple, blacklist, new AtomicInteger(0), nodeSelector); throw new AssertionError("expected selectHosts to fail"); } catch (IOException e) { return e.getMessage(); @@ -478,5 +482,56 @@ private static RestClient createRestClient() { new Header[] {}, nodes, null, null, null); } + public void testRoundRobin() throws IOException { + int numNodes = randomIntBetween(2, 10); + AuthCache authCache = new BasicAuthCache(); + List nodes = new ArrayList<>(numNodes); + for (int i = 0; i < numNodes; i++) { + Node node = new Node(new HttpHost("localhost", 9200 + i)); + nodes.add(node); + authCache.put(node.getHost(), new BasicScheme()); + } + NodeTuple> nodeTuple = new NodeTuple<>(nodes, authCache); + + //test the transition from negative to positive values + AtomicInteger lastNodeIndex = new AtomicInteger(-numNodes); + assertNodes(nodeTuple, lastNodeIndex, 50); + assertEquals(-numNodes + 50, lastNodeIndex.get()); + + //test the highest positive values up to MAX_VALUE + lastNodeIndex.set(Integer.MAX_VALUE - numNodes * 10); + assertNodes(nodeTuple, lastNodeIndex, numNodes * 10); + assertEquals(Integer.MAX_VALUE, lastNodeIndex.get()); + + //test the transition from MAX_VALUE to MIN_VALUE + //this is the only time where there is most likely going to be a jump from a node + //to another one that's not necessarily the next one. + assertEquals(Integer.MIN_VALUE, lastNodeIndex.incrementAndGet()); + assertNodes(nodeTuple, lastNodeIndex, 50); + assertEquals(Integer.MIN_VALUE + 50, lastNodeIndex.get()); + } + private static void assertNodes(NodeTuple> nodeTuple, AtomicInteger lastNodeIndex, int runs) throws IOException { + int distance = lastNodeIndex.get() % nodeTuple.nodes.size(); + /* + * Collections.rotate is not super intuitive: distance 1 means that the last element will become the first and so on, + * while distance -1 means that the second element will become the first and so on. + */ + int expectedOffset = distance > 0 ? 
nodeTuple.nodes.size() - distance : Math.abs(distance); + for (int i = 0; i < runs; i++) { + Iterable selectedNodes = RestClient.selectNodes(nodeTuple, Collections.emptyMap(), + lastNodeIndex, NodeSelector.ANY); + List expectedNodes = nodeTuple.nodes; + int index = 0; + for (Node actualNode : selectedNodes) { + Node expectedNode = expectedNodes.get((index + expectedOffset) % expectedNodes.size()); + assertSame(expectedNode, actualNode); + index++; + } + expectedOffset--; + if (expectedOffset < 0) { + expectedOffset += nodeTuple.nodes.size(); + } + } + } } diff --git a/distribution/packages/src/deb/init.d/elasticsearch b/distribution/packages/src/deb/init.d/elasticsearch index 21ac80a9c22aa..5ff7a5e74c62f 100755 --- a/distribution/packages/src/deb/init.d/elasticsearch +++ b/distribution/packages/src/deb/init.d/elasticsearch @@ -122,7 +122,7 @@ case "$1" in ulimit -l $MAX_LOCKED_MEMORY fi - if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -ge $(cat /proc/sys/vm/max_map_count) ]; then + if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -gt $(cat /proc/sys/vm/max_map_count) ]; then sysctl -q -w vm.max_map_count=$MAX_MAP_COUNT fi diff --git a/distribution/packages/src/rpm/init.d/elasticsearch b/distribution/packages/src/rpm/init.d/elasticsearch index d0fb4f759d184..8f91db312738d 100644 --- a/distribution/packages/src/rpm/init.d/elasticsearch +++ b/distribution/packages/src/rpm/init.d/elasticsearch @@ -90,7 +90,7 @@ start() { if [ -n "$MAX_LOCKED_MEMORY" ]; then ulimit -l $MAX_LOCKED_MEMORY fi - if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -ge $(cat /proc/sys/vm/max_map_count) ]; then + if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count -a "$MAX_MAP_COUNT" -gt $(cat /proc/sys/vm/max_map_count) ]; then sysctl -q -w vm.max_map_count=$MAX_MAP_COUNT fi diff --git a/docs/README.asciidoc b/docs/README.asciidoc index 766aeae0c5d94..13a07e92d68e1 100644 --- a/docs/README.asciidoc +++ b/docs/README.asciidoc @@ -8,7 +8,8 @@ CONSOLE" and "COPY AS CURL" in the documentation and are automatically tested by the command `gradle :docs:check`. To test just the docs from a single page, use e.g. `gradle :docs:check -Dtests.method="\*rollover*"`. -NOTE: If you have an elasticsearch-extra folder alongside your elasticsearch folder, you must temporarily rename it when you are testing 6.3 or later branches. +NOTE: If you have an elasticsearch-extra folder alongside your elasticsearch +folder, you must temporarily rename it when you are testing 6.3 or later branches. By default each `// CONSOLE` snippet runs as its own isolated test. You can manipulate the test execution in the following ways: @@ -36,7 +37,8 @@ for its modifiers: reason why the test shouldn't be run. * `// TEST[setup:name]`: Run some setup code before running the snippet. This is useful for creating and populating indexes used in the snippet. The setup - code is defined in `docs/build.gradle`. + code is defined in `docs/build.gradle`. See `// TESTSETUP` below for a + similar feature. * `// TEST[warning:some warning]`: Expect the response to include a `Warning` header. If the response doesn't include a `Warning` header with the exact text then the test fails. If the response includes `Warning` headers that @@ -68,7 +70,9 @@ for its modifiers: a test that runs the setup snippet first. See the "painless" docs for a file that puts this to good use. 
This is fairly similar to `// TEST[setup:name]` but rather than the setup defined in `docs/build.gradle` the setup is defined - right in the documentation file. + right in the documentation file. In general, we should prefer `// TESTSETUP` + over `// TEST[setup:name]` because it makes it more clear what steps have to + be taken before the examples will work. In addition to the standard CONSOLE syntax these snippets can contain blocks of yaml surrounded by markers like this: diff --git a/docs/build.gradle b/docs/build.gradle index b04016c946eed..5f0caf4f19fd7 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -63,6 +63,8 @@ buildRestTests.docs = fileTree(projectDir) { exclude 'README.asciidoc' } +listSnippets.docs = buildRestTests.docs + Closure setupTwitter = { String name, int count -> buildRestTests.setups[name] = ''' - do: @@ -225,31 +227,6 @@ buildRestTests.doFirst { buildRestTests.setups['bank'].replace('#bank_data#', accounts) } -buildRestTests.setups['range_index'] = ''' - - do : - indices.create: - index: range_index - body: - settings: - number_of_shards: 2 - number_of_replicas: 1 - mappings: - _doc: - properties: - expected_attendees: - type: integer_range - time_frame: - type: date_range - format: yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis - - do: - bulk: - index: range_index - type: _doc - refresh: true - body: | - {"index":{"_id": 1}} - {"expected_attendees": {"gte": 10, "lte": 20}, "time_frame": {"gte": "2015-10-31 12:00:00", "lte": "2015-11-01"}}''' - // Used by index boost doc buildRestTests.setups['index_boost'] = ''' - do: @@ -603,4 +580,4 @@ buildRestTests.setups['library'] = ''' {"index":{"_id": "The Moon is a Harsh Mistress"}} {"name": "The Moon is a Harsh Mistress", "author": "Robert A. Heinlein", "release_date": "1966-04-01", "page_count": 288} -''' \ No newline at end of file +''' diff --git a/docs/java-rest/high-level/search/explain.asciidoc b/docs/java-rest/high-level/search/explain.asciidoc new file mode 100644 index 0000000000000..9e55ad77ea203 --- /dev/null +++ b/docs/java-rest/high-level/search/explain.asciidoc @@ -0,0 +1,113 @@ +[[java-rest-high-explain]] +=== Explain API + +The explain api computes a score explanation for a query and a specific document. +This can give useful feedback whether a document matches or didn’t match a specific query. + +[[java-rest-high-explain-request]] +==== Explain Request + +An `ExplainRequest` expects an `index`, a `type` and an `id` to specify a certain document, +and a query represented by `QueryBuilder` to run against it (the way of <>). + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request] +-------------------------------------------------- + +===== Optional arguments + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-routing] +-------------------------------------------------- +<1> Set a routing parameter + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-preference] +-------------------------------------------------- +<1> Use the preference parameter e.g. to execute the search to prefer local +shards. The default is to randomize across shards. 
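For orientation, a minimal inline sketch of how the routing and preference options above can be combined on one request; the index, type, id, field, and routing values are placeholders copied from the test snippets above, and `client` is assumed to be an existing `RestHighLevelClient`:

["source","java"]
--------------------------------------------------
ExplainRequest request = new ExplainRequest("contributors", "doc", "1");
request.query(QueryBuilders.termQuery("user", "tanguy"));
request.routing("routing");    // resolve the request against the shard this routing value maps to
request.preference("_local");  // prefer shard copies on the local node instead of random selection

ExplainResponse response = client.explain(request, RequestOptions.DEFAULT);
--------------------------------------------------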
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-source] +-------------------------------------------------- +<1> Set to true to retrieve the _source of the document explained. You can also +retrieve part of the document by using _source_include & _source_exclude +(see <> for more details) + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-stored-field] +-------------------------------------------------- +<1> Allows to control which stored fields to return as part of the document explained +(requires the field to be stored separately in the mappings). + +[[java-rest-high-explain-sync]] +==== Synchronous Execution + +The `explain` method executes the request synchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute] +-------------------------------------------------- + +[[java-rest-high-explain-async]] +==== Asynchronous Execution + +The `explainAsync` method executes the request asynchronously, +calling the provided `ActionListener` when the response is ready: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute-async] +-------------------------------------------------- +<1> The `ExplainRequest` to execute and the `ActionListener` to use when +the execution completes. + +The asynchronous method does not block and returns immediately. Once the request +completes, the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `ExplainResponse` is constructed as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. +<2> Called when the whole `FieldCapabilitiesRequest` fails. + +[[java-rest-high-explain-response]] +==== ExplainResponse + +The `ExplainResponse` contains the following information: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-response] +-------------------------------------------------- +<1> The index name of the explained document. +<2> The type name of the explained document. +<3> The id of the explained document. +<4> Indicates whether or not the explained document exists. +<5> Indicates whether or not there is a match between the explained document and +the provided query (the `match` is retrieved from the lucene `Explanation` behind the scenes +if the lucene `Explanation` models a match, it returns `true`, otherwise it returns `false`). +<6> Indicates whether or not there exists a lucene `Explanation` for this request. +<7> Get the lucene `Explanation` object if there exists. +<8> Get the `GetResult` object if the `_source` or the stored fields are retrieved. + +The `GetResult` contains two maps internally to store the fetched `_source` and stored fields. 
+You can use the following methods to get them: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[get-result] +-------------------------------------------------- +<1> Retrieve the `_source` as a map. +<2> Retrieve the specified stored fields as a map. diff --git a/docs/java-rest/high-level/search/multi-search-template.asciidoc b/docs/java-rest/high-level/search/multi-search-template.asciidoc new file mode 100644 index 0000000000000..c5133f6614eef --- /dev/null +++ b/docs/java-rest/high-level/search/multi-search-template.asciidoc @@ -0,0 +1,81 @@ +[[java-rest-high-multi-search-template]] +=== Multi-Search-Template API + +The `multiSearchTemplate` API executes multiple <> +requests in a single http request in parallel. + +[[java-rest-high-multi-search-template-request]] +==== Multi-Search-Template Request + +The `MultiSearchTemplateRequest` is built empty and you add all of the searches that +you wish to execute to it: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-request-inline] +-------------------------------------------------- +<1> Create an empty `MultiSearchTemplateRequest`. +<2> Create one or more `SearchTemplateRequest` objects and populate them just like you +would for a regular <>. +<3> Add the `SearchTemplateRequest` to the `MultiSearchTemplateRequest`. + +===== Optional arguments + +The multiSearchTemplate's `max_concurrent_searches` request parameter can be used to control +the maximum number of concurrent searches the multi search api will execute. +This default is based on the number of data nodes and the default search thread pool size. + +[[java-rest-high-multi-search-template-sync]] +==== Synchronous Execution + +The `multiSearchTemplate` method executes `MultiSearchTemplateRequest`s synchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-request-sync] +-------------------------------------------------- + +[[java-rest-high-multi-search-template-async]] +==== Asynchronous Execution + +The `multiSearchTemplateAsync` method executes `MultiSearchTemplateRequest`s asynchronously, +calling the provided `ActionListener` when the response is ready. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-execute-async] +-------------------------------------------------- +The parameters are the `MultiSearchTemplateRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `MultiSearchTemplateResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. +<2> Called when the whole `MultiSearchTemplateRequest` fails. 
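The "Optional arguments" note near the top of this page mentions the `max_concurrent_searches` parameter but shows no snippet for it. As a hedged sketch, the matching setter on the request object is `maxConcurrentSearchRequests` (the request's unit tests in this change reject non-positive values for it); the value of 2 below is purely illustrative:

["source","java"]
--------------------------------------------------
MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();
multiRequest.maxConcurrentSearchRequests(2); // must be a positive integer
--------------------------------------------------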
+ +==== MultiSearchTemplateResponse + +The `MultiSearchTemplateResponse` that is returned by executing the `multiSearchTemplate` method contains +a `MultiSearchTemplateResponse.Item` for each `SearchTemplateRequest` in the +`MultiSearchTemplateRequest`. Each `MultiSearchTemplateResponse.Item` contains an +exception in `getFailure` if the request failed or a +<> in `getResponse` if +the request succeeded: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[multi-search-template-response] +-------------------------------------------------- +<1> An array of responses is returned - one response for each request +<2> Failed search template requests have error messages +<3> Successful requests contain a <> in +`getResponse`. diff --git a/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc b/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc new file mode 100644 index 0000000000000..dbd31380a9b4b --- /dev/null +++ b/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc @@ -0,0 +1,121 @@ +[[java-rest-high-snapshot-create-snapshot]] +=== Create Snapshot API + +Use the Create Snapshot API to create a new snapshot. + +[[java-rest-high-snapshot-create-snapshot-request]] +==== Create Snapshot Request + +A `CreateSnapshotRequest`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request] +-------------------------------------------------- + +==== Required Arguments +The following arguments are mandatory: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-repositoryName] +-------------------------------------------------- +<1> The name of the repository. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-snapshotName] +-------------------------------------------------- +<1> The name of the snapshot. + +==== Optional Arguments +The following arguments are optional: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-indices] +-------------------------------------------------- +<1> A list of indices the snapshot is applied to. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-indicesOptions] +-------------------------------------------------- +<1> Options applied to the indices. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-partial] +-------------------------------------------------- +<1> Set `partial` to `true` to allow a successful snapshot without the +availability of all the indices primary shards. Defaults to `false`. 
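As a note on the indices-options snippet shown earlier, the four boolean arguments to `IndicesOptions.fromOptions` are, treating this ordering as an assumption rather than documented fact, `ignoreUnavailable`, `allowNoIndices`, `expandToOpenIndices`, and `expandToClosedIndices`:

["source","java"]
--------------------------------------------------
// assumed order: fromOptions(ignoreUnavailable, allowNoIndices, expandToOpenIndices, expandToClosedIndices)
request.indicesOptions(IndicesOptions.fromOptions(false, false, true, true));
--------------------------------------------------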
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-includeGlobalState] +-------------------------------------------------- +<1> Set `includeGlobalState` to `false` to prevent writing the cluster's global +state as part of the snapshot. Defaults to `true`. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue`. +<2> Timeout to connect to the master node as a `String`. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-waitForCompletion] +-------------------------------------------------- +<1> Waits for the snapshot to be completed before a response is returned. + +[[java-rest-high-snapshot-create-snapshot-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute] +-------------------------------------------------- + +[[java-rest-high-snapshot-create-snapshot-async]] +==== Asynchronous Execution + +The asynchronous execution of a create snapshot request requires both the +`CreateSnapshotRequest` instance and an `ActionListener` instance to be +passed as arguments to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute-async] +-------------------------------------------------- +<1> The `CreateSnapshotRequest` to execute and the `ActionListener` to use when +the execution completes. + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back with the `onResponse` method +if the execution is successful or the `onFailure` method if the execution +failed. + +A typical listener for `CreateSnapshotResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument. +<2> Called in case of a failure. The raised exception is provided as an +argument. + +[[java-rest-high-snapshot-create-snapshot-response]] +==== Snapshot Create Response + +Use the `CreateSnapshotResponse` to retrieve information about the evaluated +request: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-response] +-------------------------------------------------- +<1> Indicates the node has started the request. 
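Condensing the tagged snippets above into one end-to-end sketch of a synchronous create-snapshot call (the repository and snapshot names are placeholders, and `client` is an existing `RestHighLevelClient`):

["source","java"]
--------------------------------------------------
CreateSnapshotRequest request = new CreateSnapshotRequest();
request.repository("my_repository");           // must refer to a registered snapshot repository
request.snapshot("my_snapshot");               // name of the snapshot to create
request.indices("test-index0", "test-index1"); // optional: restrict the snapshot to these indices
request.waitForCompletion(true);               // block until the snapshot has finished

CreateSnapshotResponse response = client.snapshot().createSnapshot(request, RequestOptions.DEFAULT);
RestStatus status = response.status();         // RestStatus.OK when the snapshot completed successfully
--------------------------------------------------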
diff --git a/docs/java-rest/high-level/snapshot/get_snapshots.asciidoc b/docs/java-rest/high-level/snapshot/get_snapshots.asciidoc new file mode 100644 index 0000000000000..191c7173f1597 --- /dev/null +++ b/docs/java-rest/high-level/snapshot/get_snapshots.asciidoc @@ -0,0 +1,103 @@ +[[java-rest-high-snapshot-get-snapshots]] +=== Get Snapshots API + +Use the Get Snapshot API to get snapshots. + +[[java-rest-high-snapshot-get-snapshots-request]] +==== Get Snapshots Request + +A `GetSnapshotsRequest`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request] +-------------------------------------------------- + +==== Required Arguments +The following arguments are mandatory: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-repositoryName] +-------------------------------------------------- +<1> The name of the repository. + +==== Optional Arguments +The following arguments are optional: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-snapshots] +-------------------------------------------------- +<1> An array of snapshots to get. Otherwise it will return all snapshots for a repository. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue`. +<2> Timeout to connect to the master node as a `String`. + + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-verbose] +-------------------------------------------------- +<1> `Boolean` indicating if the response should be verbose. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-request-ignore-unavailable] +-------------------------------------------------- +<1> `Boolean` indicating if unavailable snapshots should be ignored. Otherwise the request will +fail if any of the snapshots are unavailable. 
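Pulling the get-snapshots options together, a short sketch of a synchronous call (again with placeholder names and an existing `RestHighLevelClient` named `client`):

["source","java"]
--------------------------------------------------
GetSnapshotsRequest request = new GetSnapshotsRequest();
request.repository("my_repository");               // repository to inspect
request.snapshots(new String[] { "my_snapshot" }); // omit to list every snapshot in the repository
request.verbose(true);                             // return the full SnapshotInfo for each snapshot
request.ignoreUnavailable(false);                  // fail the request if a named snapshot is missing

GetSnapshotsResponse response = client.snapshot().get(request, RequestOptions.DEFAULT);
List<SnapshotInfo> snapshots = response.getSnapshots();
--------------------------------------------------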
+ +[[java-rest-high-snapshot-get-snapshots-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-execute] +-------------------------------------------------- + +[[java-rest-high-snapshot-get-snapshots-async]] +==== Asynchronous Execution + +The asynchronous execution of a get snapshots request requires both the +`GetSnapshotsRequest` instance and an `ActionListener` instance to be +passed as arguments to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-execute-async] +-------------------------------------------------- +<1> The `GetSnapshotsRequest` to execute and the `ActionListener` to use when +the execution completes. + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back with the `onResponse` method +if the execution is successful or the `onFailure` method if the execution +failed. + +A typical listener for `GetSnapshotsResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument. +<2> Called in case of a failure. The raised exception is provided as an +argument. + +[[java-rest-high-snapshot-get-snapshots-response]] +==== Get Snapshots Response + +Use the `GetSnapshotsResponse` to retrieve information about the evaluated +request: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[get-snapshots-response] +-------------------------------------------------- +<1> Indicates the node has started the request. 
\ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 9ed54db817551..d2484db1d7860 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -32,16 +32,20 @@ The Java High Level REST Client supports the following Search APIs: * <> * <> * <> +* <> * <> * <> * <> +* <> include::search/search.asciidoc[] include::search/scroll.asciidoc[] include::search/multi-search.asciidoc[] include::search/search-template.asciidoc[] +include::search/multi-search-template.asciidoc[] include::search/field-caps.asciidoc[] include::search/rank-eval.asciidoc[] +include::search/explain.asciidoc[] == Miscellaneous APIs @@ -140,12 +144,16 @@ The Java High Level REST Client supports the following Snapshot APIs: * <> * <> * <> +* <> +* <> * <> include::snapshot/get_repository.asciidoc[] include::snapshot/create_repository.asciidoc[] include::snapshot/delete_repository.asciidoc[] include::snapshot/verify_repository.asciidoc[] +include::snapshot/create_snapshot.asciidoc[] +include::snapshot/get_snapshots.asciidoc[] include::snapshot/delete_snapshot.asciidoc[] == Tasks APIs diff --git a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc index 3fd0d21ae4e69..b9dac1dd0fb0c 100644 --- a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc @@ -18,7 +18,7 @@ value. For example, if a value is greater than or equal to 95% of the observed it is said to be at the 95th percentile rank. Assume your data consists of website load times. You may have a service agreement that -95% of page loads completely within 15ms and 99% of page loads complete within 30ms. +95% of page loads completely within 500ms and 99% of page loads complete within 600ms. Let's look at a range of percentiles representing load time: diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index c04bbd6813795..e6a47e0a39823 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -428,7 +428,7 @@ The `username` and `password` parameters are optional, and when they are present will connect to the remote Elasticsearch node using basic auth. Be sure to use `https` when using basic auth or the password will be sent in plain text. -Remote hosts have to be explicitly whitelisted in elasticsearch.yaml using the +Remote hosts have to be explicitly whitelisted in elasticsearch.yml using the `reindex.remote.whitelist` property. It can be set to a comma delimited list of allowed remote `host` and `port` combinations (e.g. `otherhost:9200, another:9200, 127.0.10.*:9200, localhost:*`). 
Scheme is diff --git a/docs/reference/mapping/types/range.asciidoc b/docs/reference/mapping/types/range.asciidoc index a7ab6346176cb..082d012a49058 100644 --- a/docs/reference/mapping/types/range.asciidoc +++ b/docs/reference/mapping/types/range.asciidoc @@ -18,6 +18,9 @@ Below is an example of configuring a mapping with various range fields followed -------------------------------------------------- PUT range_index { + "settings": { + "number_of_shards": 2 + }, "mappings": { "_doc": { "properties": { @@ -33,7 +36,7 @@ PUT range_index } } -PUT range_index/_doc/1 +PUT range_index/_doc/1?refresh { "expected_attendees" : { <2> "gte" : 10, @@ -46,6 +49,7 @@ PUT range_index/_doc/1 } -------------------------------------------------- //CONSOLE +// TESTSETUP <1> `date_range` types accept the same field parameters defined by the <> type. <2> Example indexing a meeting with 10 to 20 attendees. @@ -68,7 +72,6 @@ GET range_index/_search } -------------------------------------------------- // CONSOLE -// TEST[setup:range_index] The result produced by the above query. @@ -125,7 +128,6 @@ GET range_index/_search } -------------------------------------------------- // CONSOLE -// TEST[setup:range_index] <1> Range queries work the same as described in <>. <2> Range queries over range <> support a `relation` parameter which can be one of `WITHIN`, `CONTAINS`, @@ -191,7 +193,6 @@ PUT range_index/_doc/2 } -------------------------------------------------- // CONSOLE -// TEST[setup:range_index] [[range-params]] ==== Parameters for range fields diff --git a/docs/reference/query-dsl/common-terms-query.asciidoc b/docs/reference/query-dsl/common-terms-query.asciidoc index 41034f357ce4c..87288778246a6 100644 --- a/docs/reference/query-dsl/common-terms-query.asciidoc +++ b/docs/reference/query-dsl/common-terms-query.asciidoc @@ -184,8 +184,6 @@ GET /_search -------------------------------------------------- // CONSOLE -minimum_should_match - A different <> can be applied for low and high frequency terms with the additional diff --git a/docs/reference/query-dsl/full-text-queries.asciidoc b/docs/reference/query-dsl/full-text-queries.asciidoc index ba3924669d812..aaa0a911372c8 100644 --- a/docs/reference/query-dsl/full-text-queries.asciidoc +++ b/docs/reference/query-dsl/full-text-queries.asciidoc @@ -25,7 +25,7 @@ The queries in this group are: The multi-field version of the `match` query. -<>:: +<>:: A more specialized query which gives more preference to uncommon words. @@ -35,7 +35,7 @@ The queries in this group are: allowing you to specify AND|OR|NOT conditions and multi-field search within a single query string. For expert users only. -<>:: +<>:: A simpler, more robust version of the `query_string` syntax suitable for exposing directly to users. 
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index a5fe1cb94b9ee..758de960ec794 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 7962563f742fe..37e3d3699fafc 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ -distributionUrl=https\://services.gradle.org/distributions/gradle-4.7-all.zip distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-4.8.1-all.zip zipStoreBase=GRADLE_USER_HOME -distributionSha256Sum=203f4537da8b8075e38c036a6d14cb71b1149de5bf0a8f6db32ac2833a1d1294 +zipStorePath=wrapper/dists +distributionSha256Sum=ce1645ff129d11aad62dab70d63426fdce6cfd646fa309dc5dc5255dd03c7c11 diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java index bd294db6d2957..caa9fa4831add 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java @@ -23,13 +23,21 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -126,4 +134,39 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeStreamableList(requests); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MultiSearchTemplateRequest that = (MultiSearchTemplateRequest) o; + return maxConcurrentSearchRequests == that.maxConcurrentSearchRequests && + Objects.equals(requests, that.requests) && + Objects.equals(indicesOptions, that.indicesOptions); + } + + @Override + public int hashCode() { + return Objects.hash(maxConcurrentSearchRequests, requests, indicesOptions); + } + + public static byte[] writeMultiLineFormat(MultiSearchTemplateRequest multiSearchTemplateRequest, + XContent xContent) throws IOException { + ByteArrayOutputStream output = new ByteArrayOutputStream(); + for (SearchTemplateRequest templateRequest : multiSearchTemplateRequest.requests()) { + final SearchRequest searchRequest = templateRequest.getRequest(); + try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) { + MultiSearchRequest.writeSearchRequestParams(searchRequest, xContentBuilder); + BytesReference.bytes(xContentBuilder).writeTo(output); + } 
+ output.write(xContent.streamSeparator()); + try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) { + templateRequest.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + BytesReference.bytes(xContentBuilder).writeTo(output); + } + output.write(xContent.streamSeparator()); + } + return output.toByteArray(); + } + } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java index b5bc86679aed4..74f3bc743aeb2 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java @@ -22,6 +22,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -31,6 +32,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Arrays; @@ -106,6 +108,13 @@ public void writeTo(StreamOutput out) throws IOException { public Exception getFailure() { return exception; } + + @Override + public String toString() { + return "Item [response=" + response + ", exception=" + exception + "]"; + } + + } private Item[] items; @@ -117,7 +126,7 @@ public Exception getFailure() { public MultiSearchTemplateResponse(Item[] items, long tookInMillis) { this.items = items; this.tookInMillis = tookInMillis; - } + } @Override public Iterator iterator() { @@ -184,6 +193,23 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par static final class Fields { static final String RESPONSES = "responses"; } + + public static MultiSearchTemplateResponse fromXContext(XContentParser parser) { + //The MultiSearchTemplateResponse is identical to the multi search response so we reuse the parsing logic in multi search response + MultiSearchResponse mSearchResponse = MultiSearchResponse.fromXContext(parser); + org.elasticsearch.action.search.MultiSearchResponse.Item[] responses = mSearchResponse.getResponses(); + Item[] templateResponses = new Item[responses.length]; + int i = 0; + for (org.elasticsearch.action.search.MultiSearchResponse.Item item : responses) { + SearchTemplateResponse stResponse = null; + if(item.getResponse() != null){ + stResponse = new SearchTemplateResponse(); + stResponse.setResponse(item.getResponse()); + } + templateResponses[i++] = new Item(stResponse, item.getFailure()); + } + return new MultiSearchTemplateResponse(templateResponses, mSearchResponse.getTook().millis()); + } @Override public String toString() { diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 500a5a399ef4a..6d19afbfd6fe6 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java 
@@ -66,6 +66,11 @@ public void setResponse(SearchResponse searchResponse) { public boolean hasResponse() { return response != null; + } + + @Override + public String toString() { + return "SearchTemplateResponse [source=" + source + ", response=" + response + "]"; } @Override diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java index 1ff5479623765..ee967237d3c9b 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java @@ -19,14 +19,22 @@ package org.elasticsearch.script.mustache; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.Scroll; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.FakeRestRequest; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; + import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -97,5 +105,57 @@ public void testMaxConcurrentSearchRequests() { expectThrows(IllegalArgumentException.class, () -> request.maxConcurrentSearchRequests(randomIntBetween(Integer.MIN_VALUE, 0))); } + + public void testMultiSearchTemplateToJson() throws Exception { + final int numSearchRequests = randomIntBetween(1, 10); + MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest(); + for (int i = 0; i < numSearchRequests; i++) { + // Create a random request. 
+ String[] indices = {"test"}; + SearchRequest searchRequest = new SearchRequest(indices); + // scroll is not supported in the current msearch or msearchtemplate api, so unset it: + searchRequest.scroll((Scroll) null); + // batched reduce size is currently not set-able on a per-request basis as it is a query string parameter only + searchRequest.setBatchedReduceSize(SearchRequest.DEFAULT_BATCHED_REDUCE_SIZE); + SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(searchRequest); + + searchTemplateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}"); + searchTemplateRequest.setScriptType(ScriptType.INLINE); + searchTemplateRequest.setProfile(randomBoolean()); + + Map scriptParams = new HashMap<>(); + scriptParams.put("field", "name"); + scriptParams.put("value", randomAlphaOfLengthBetween(2, 5)); + searchTemplateRequest.setScriptParams(scriptParams); + + multiSearchTemplateRequest.add(searchTemplateRequest); + } + + //Serialize the request + String serialized = toJsonString(multiSearchTemplateRequest); + + //Deserialize the request + RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()) + .withContent(new BytesArray(serialized), XContentType.JSON).build(); + MultiSearchTemplateRequest deser = RestMultiSearchTemplateAction.parseRequest(restRequest, true); + + // For object equality purposes need to set the search requests' source to non-null + for (SearchTemplateRequest str : deser.requests()) { + SearchRequest sr = str.getRequest(); + if (sr.source() == null) { + sr.source(new SearchSourceBuilder()); + } + } + // Compare the deserialized request object with the original request object + assertEquals(multiSearchTemplateRequest, deser); + + // Finally, serialize the deserialized request to compare JSON equivalence (in case Object.equals() fails to reveal a discrepancy) + assertEquals(serialized, toJsonString(deser)); + } + + protected String toJsonString(MultiSearchTemplateRequest multiSearchTemplateRequest) throws IOException { + byte[] bytes = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, XContentType.JSON.xContent()); + return new String(bytes, StandardCharsets.UTF_8); + } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java new file mode 100644 index 0000000000000..2c67dd4709bc9 --- /dev/null +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.script.mustache; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.function.Predicate; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class MultiSearchTemplateResponseTests extends AbstractXContentTestCase { + + @Override + protected MultiSearchTemplateResponse createTestInstance() { + int numItems = randomIntBetween(0, 128); + long overallTookInMillis = randomNonNegativeLong(); + MultiSearchTemplateResponse.Item[] items = new MultiSearchTemplateResponse.Item[numItems]; + for (int i = 0; i < numItems; i++) { + // Creating a minimal response is OK, because SearchResponse self + // is tested elsewhere. + long tookInMillis = randomNonNegativeLong(); + int totalShards = randomIntBetween(1, Integer.MAX_VALUE); + int successfulShards = randomIntBetween(0, totalShards); + int skippedShards = totalShards - successfulShards; + InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); + SearchResponse.Clusters clusters = new SearchResponse.Clusters(totalShards, successfulShards, skippedShards); + SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); + SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, totalShards, + successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters); + searchTemplateResponse.setResponse(searchResponse); + items[i] = new MultiSearchTemplateResponse.Item(searchTemplateResponse, null); + } + return new MultiSearchTemplateResponse(items, overallTookInMillis); + } + + + private static MultiSearchTemplateResponse createTestInstanceWithFailures() { + int numItems = randomIntBetween(0, 128); + long overallTookInMillis = randomNonNegativeLong(); + MultiSearchTemplateResponse.Item[] items = new MultiSearchTemplateResponse.Item[numItems]; + for (int i = 0; i < numItems; i++) { + if (randomBoolean()) { + // Creating a minimal response is OK, because SearchResponse self + // is tested elsewhere. 
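+                // success branch: wrap a minimal SearchResponse in a SearchTemplateResponse item; the else branch below produces a failure item instead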
+ long tookInMillis = randomNonNegativeLong(); + int totalShards = randomIntBetween(1, Integer.MAX_VALUE); + int successfulShards = randomIntBetween(0, totalShards); + int skippedShards = totalShards - successfulShards; + InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); + SearchResponse.Clusters clusters = new SearchResponse.Clusters(totalShards, successfulShards, skippedShards); + SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); + SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, totalShards, + successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters); + searchTemplateResponse.setResponse(searchResponse); + items[i] = new MultiSearchTemplateResponse.Item(searchTemplateResponse, null); + } else { + items[i] = new MultiSearchTemplateResponse.Item(null, new ElasticsearchException("an error")); + } + } + return new MultiSearchTemplateResponse(items, overallTookInMillis); + } + + @Override + protected MultiSearchTemplateResponse doParseInstance(XContentParser parser) throws IOException { + return MultiSearchTemplateResponse.fromXContext(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + protected Predicate getRandomFieldsExcludeFilterWhenResultHasErrors() { + return field -> field.startsWith("responses"); + } + + @Override + protected void assertEqualInstances(MultiSearchTemplateResponse expectedInstance, MultiSearchTemplateResponse newInstance) { + assertThat(newInstance.getTook(), equalTo(expectedInstance.getTook())); + assertThat(newInstance.getResponses().length, equalTo(expectedInstance.getResponses().length)); + for (int i = 0; i < expectedInstance.getResponses().length; i++) { + MultiSearchTemplateResponse.Item expectedItem = expectedInstance.getResponses()[i]; + MultiSearchTemplateResponse.Item actualItem = newInstance.getResponses()[i]; + if (expectedItem.isFailure()) { + assertThat(actualItem.getResponse(), nullValue()); + assertThat(actualItem.getFailureMessage(), containsString(expectedItem.getFailureMessage())); + } else { + assertThat(actualItem.getResponse().toString(), equalTo(expectedItem.getResponse().toString())); + assertThat(actualItem.getFailure(), nullValue()); + } + } + } + + /** + * Test parsing {@link MultiSearchTemplateResponse} with inner failures as they don't support asserting on xcontent equivalence, given + * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = MultiSearchTemplateResponseTests::createTestInstanceWithFailures; + //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, + //but that does not bother our assertions, as we only want to test that we don't break. 
+ boolean supportsUnknownFields = true; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, Strings.EMPTY_ARRAY, + getRandomFieldsExcludeFilterWhenResultHasErrors(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, ToXContent.EMPTY_PARAMS); + } + +} diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java index 1821166c8845e..751e00f06adbb 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java @@ -100,14 +100,14 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize) t public void deleteBlob(String blobName) throws IOException { logger.trace("deleteBlob({})", blobName); - if (!blobExists(blobName)) { - throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); - } - try { blobStore.deleteBlob(buildKey(blobName)); - } catch (URISyntaxException | StorageException e) { - logger.warn("can not access [{}] in container {{}}: {}", blobName, blobStore, e.getMessage()); + } catch (StorageException e) { + if (e.getHttpStatusCode() == HttpURLConnection.HTTP_NOT_FOUND) { + throw new NoSuchFileException(e.getMessage()); + } + throw new IOException(e); + } catch (URISyntaxException e) { throw new IOException(e); } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java index e3f56323f3cbf..c10a6d674c848 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java @@ -150,19 +150,17 @@ public void createContainer(String account, String container) throws URISyntaxEx public void deleteFiles(String account, String container, String path) throws URISyntaxException, StorageException { final Tuple> client = client(account); // container name must be lower case. 
- final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); logger.trace(() -> new ParameterizedMessage("delete files container [{}], path [{}]", container, path)); SocketAccess.doPrivilegedVoidException(() -> { - if (blobContainer.exists()) { - // list the blobs using a flat blob listing mode - for (final ListBlobItem blobItem : blobContainer.listBlobs(path, true, EnumSet.noneOf(BlobListingDetails.class), null, - client.v2().get())) { - final String blobName = blobNameFromUri(blobItem.getUri()); - logger.trace(() -> new ParameterizedMessage("removing blob [{}] full URI was [{}]", blobName, blobItem.getUri())); - // don't call {@code #deleteBlob}, use the same client - final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blobName); - azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get()); - } + // list the blobs using a flat blob listing mode + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + for (final ListBlobItem blobItem : blobContainer.listBlobs(path, true, EnumSet.noneOf(BlobListingDetails.class), null, + client.v2().get())) { + final String blobName = blobNameFromUri(blobItem.getUri()); + logger.trace(() -> new ParameterizedMessage("removing blob [{}] full URI was [{}]", blobName, blobItem.getUri())); + // don't call {@code #deleteBlob}, use the same client + final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blobName); + azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get()); } }); } @@ -192,11 +190,8 @@ public boolean blobExists(String account, String container, String blob) final Tuple> client = client(account); final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); return SocketAccess.doPrivilegedException(() -> { - if (blobContainer.exists(null, null, client.v2().get())) { - final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); - return azureBlob.exists(null, null, client.v2().get()); - } - return false; + final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); + return azureBlob.exists(null, null, client.v2().get()); }); } @@ -207,11 +202,9 @@ public void deleteBlob(String account, String container, String blob) throws URI final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); logger.trace(() -> new ParameterizedMessage("delete blob for container [{}], blob [{}]", container, blob)); SocketAccess.doPrivilegedVoidException(() -> { - if (blobContainer.exists(null, null, client.v2().get())) { - final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); - logger.trace(() -> new ParameterizedMessage("container [{}]: blob [{}] found. removing.", container, blob)); - azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get()); - } + final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); + logger.trace(() -> new ParameterizedMessage("container [{}]: blob [{}] found. 
removing.", container, blob)); + azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get()); }); } @@ -238,19 +231,17 @@ public Map listBlobsByPrefix(String account, String contai final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); logger.trace(() -> new ParameterizedMessage("listing container [{}], keyPath [{}], prefix [{}]", container, keyPath, prefix)); SocketAccess.doPrivilegedVoidException(() -> { - if (blobContainer.exists()) { - for (final ListBlobItem blobItem : blobContainer.listBlobs(keyPath + (prefix == null ? "" : prefix), false, - enumBlobListingDetails, null, client.v2().get())) { - final URI uri = blobItem.getUri(); - logger.trace(() -> new ParameterizedMessage("blob url [{}]", uri)); - // uri.getPath is of the form /container/keyPath.* and we want to strip off the /container/ - // this requires 1 + container.length() + 1, with each 1 corresponding to one of the / - final String blobPath = uri.getPath().substring(1 + container.length() + 1); - final BlobProperties properties = ((CloudBlockBlob) blobItem).getProperties(); - final String name = blobPath.substring(keyPath.length()); - logger.trace(() -> new ParameterizedMessage("blob url [{}], name [{}], size [{}]", uri, name, properties.getLength())); - blobsBuilder.put(name, new PlainBlobMetaData(name, properties.getLength())); - } + for (final ListBlobItem blobItem : blobContainer.listBlobs(keyPath + (prefix == null ? "" : prefix), false, + enumBlobListingDetails, null, client.v2().get())) { + final URI uri = blobItem.getUri(); + logger.trace(() -> new ParameterizedMessage("blob url [{}]", uri)); + // uri.getPath is of the form /container/keyPath.* and we want to strip off the /container/ + // this requires 1 + container.length() + 1, with each 1 corresponding to one of the / + final String blobPath = uri.getPath().substring(1 + container.length() + 1); + final BlobProperties properties = ((CloudBlockBlob) blobItem).getProperties(); + final String name = blobPath.substring(keyPath.length()); + logger.trace(() -> new ParameterizedMessage("blob url [{}], name [{}], size [{}]", uri, name, properties.getLength())); + blobsBuilder.put(name, new PlainBlobMetaData(name, properties.getLength())); } }); return blobsBuilder.immutableMap(); @@ -264,8 +255,8 @@ public void writeBlob(String account, String container, String blobName, InputSt final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); final CloudBlockBlob blob = blobContainer.getBlockBlobReference(blobName); try { - SocketAccess.doPrivilegedVoidException(() -> blob.upload(inputStream, blobSize, AccessCondition.generateIfNotExistsCondition(), - null, client.v2().get())); + SocketAccess.doPrivilegedVoidException(() -> + blob.upload(inputStream, blobSize, AccessCondition.generateIfNotExistsCondition(), null, client.v2().get())); } catch (final StorageException se) { if (se.getHttpStatusCode() == HttpURLConnection.HTTP_CONFLICT && StorageErrorCodeStrings.BLOB_ALREADY_EXISTS.equals(se.getErrorCode())) { diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java index a680af06fc655..46af05c3845aa 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java @@ -22,7 +22,6 
@@ import com.microsoft.azure.storage.OperationContext; import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.blob.CloudBlobClient; - import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; @@ -72,9 +71,11 @@ public void createContainer(String account, String container) { } @Override - public void deleteFiles(String account, String container, String path) { + public void deleteFiles(String account, String container, String path) throws URISyntaxException, StorageException { final Map blobs = listBlobsByPrefix(account, container, path, null); - blobs.keySet().forEach(key -> deleteBlob(account, container, key)); + for (String key : blobs.keySet()) { + deleteBlob(account, container, key); + } } @Override @@ -83,8 +84,10 @@ public boolean blobExists(String account, String container, String blob) { } @Override - public void deleteBlob(String account, String container, String blob) { - blobs.remove(blob); + public void deleteBlob(String account, String container, String blob) throws URISyntaxException, StorageException { + if (blobs.remove(blob) == null) { + throw new StorageException("BlobNotFound", "[" + blob + "] does not exist.", 404, null, null); + } } @Override diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index c0f61e4d07828..05218caa0651b 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -25,7 +25,6 @@ import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.S3ObjectSummary; import com.amazonaws.services.s3.model.StorageClass; - import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; @@ -40,7 +39,7 @@ class S3BlobStore extends AbstractComponent implements BlobStore { - private final AwsS3Service service; + private final S3Service service; private final String clientName; @@ -54,7 +53,7 @@ class S3BlobStore extends AbstractComponent implements BlobStore { private final StorageClass storageClass; - S3BlobStore(Settings settings, AwsS3Service service, String clientName, String bucket, boolean serverSideEncryption, + S3BlobStore(Settings settings, S3Service service, String clientName, String bucket, boolean serverSideEncryption, ByteSizeValue bufferSize, String cannedACL, String storageClass) { super(settings); this.service = service; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 063e266837bad..f6f949aa4d012 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -20,7 +20,6 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.auth.BasicAWSCredentials; - import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; @@ -156,8 +155,10 @@ class S3Repository extends BlobStoreRepository { /** * Constructs an s3 backed repository 
*/ - S3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry, - AwsS3Service awsService) throws IOException { + S3Repository(final RepositoryMetaData metadata, + final Settings settings, + final NamedXContentRegistry namedXContentRegistry, + final S3Service service) throws IOException { super(metadata, settings, namedXContentRegistry); final String bucket = BUCKET_SETTING.get(metadata.settings()); @@ -188,9 +189,9 @@ class S3Repository extends BlobStoreRepository { // deprecated behavior: override client credentials from the cluster state // (repository settings) if (S3ClientSettings.checkDeprecatedCredentials(metadata.settings())) { - overrideCredentialsFromClusterState(awsService); + overrideCredentialsFromClusterState(service); } - blobStore = new S3BlobStore(settings, awsService, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); + blobStore = new S3BlobStore(settings, service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); final String basePath = BASE_PATH_SETTING.get(metadata.settings()); if (Strings.hasLength(basePath)) { @@ -220,13 +221,13 @@ protected ByteSizeValue chunkSize() { return chunkSize; } - void overrideCredentialsFromClusterState(AwsS3Service awsService) { + void overrideCredentialsFromClusterState(final S3Service s3Service) { deprecationLogger.deprecated("Using s3 access/secret key from repository settings. Instead " + "store these in named clients and the elasticsearch keystore for secure settings."); final BasicAWSCredentials insecureCredentials = S3ClientSettings.loadDeprecatedCredentials(metadata.settings()); // hack, but that's ok because the whole if branch should be axed - final Map prevSettings = awsService.refreshAndClearCache(S3ClientSettings.load(Settings.EMPTY)); + final Map prevSettings = s3Service.refreshAndClearCache(S3ClientSettings.load(Settings.EMPTY)); final Map newSettings = S3ClientSettings.overrideCredentials(prevSettings, insecureCredentials); - awsService.refreshAndClearCache(newSettings); + s3Service.refreshAndClearCache(newSettings); } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index 93561c94d2b9a..6a605319114fe 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -19,14 +19,6 @@ package org.elasticsearch.repositories.s3; -import java.io.IOException; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; - import com.amazonaws.util.json.Jackson; import org.elasticsearch.SpecialPermission; import org.elasticsearch.cluster.metadata.RepositoryMetaData; @@ -39,6 +31,15 @@ import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; +import java.io.IOException; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; + /** * A plugin to add a repository type that writes to and from the AWS S3. 
*/ @@ -60,33 +61,29 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin, Relo }); } - private final AwsS3Service awsS3Service; + private final S3Service service; - public S3RepositoryPlugin(Settings settings) { - this.awsS3Service = getAwsS3Service(settings); - // eagerly load client settings so that secure settings are read - final Map clientsSettings = S3ClientSettings.load(settings); - this.awsS3Service.refreshAndClearCache(clientsSettings); + public S3RepositoryPlugin(final Settings settings) { + this(settings, new S3Service(settings)); } - protected S3RepositoryPlugin(AwsS3Service awsS3Service) { - this.awsS3Service = awsS3Service; - } - - // proxy method for testing - protected S3Repository getS3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry) - throws IOException { - return new S3Repository(metadata, settings, namedXContentRegistry, awsS3Service); + S3RepositoryPlugin(final Settings settings, final S3Service service) { + this.service = Objects.requireNonNull(service, "S3 service must not be null"); + // eagerly load client settings so that secure settings are read + final Map clientsSettings = S3ClientSettings.load(settings); + this.service.refreshAndClearCache(clientsSettings); } // proxy method for testing - protected AwsS3Service getAwsS3Service(Settings settings) { - return new InternalAwsS3Service(settings); + protected S3Repository createRepository(final RepositoryMetaData metadata, + final Settings settings, + final NamedXContentRegistry registry) throws IOException { + return new S3Repository(metadata, settings, registry, service); } @Override - public Map getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) { - return Collections.singletonMap(S3Repository.TYPE, (metadata) -> getS3Repository(metadata, env.settings(), namedXContentRegistry)); + public Map getRepositories(final Environment env, final NamedXContentRegistry registry) { + return Collections.singletonMap(S3Repository.TYPE, (metadata) -> createRepository(metadata, env.settings(), registry)); } @Override @@ -112,11 +109,11 @@ public List> getSettings() { public void reload(Settings settings) { // secure settings should be readable final Map clientsSettings = S3ClientSettings.load(settings); - awsS3Service.refreshAndClearCache(clientsSettings); + service.refreshAndClearCache(clientsSettings); } @Override public void close() throws IOException { - awsS3Service.close(); + service.close(); } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java similarity index 98% rename from plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java rename to plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index a54320f1fbd19..b59f740f2048d 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -28,24 +28,25 @@ import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; - import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; import 
org.elasticsearch.common.settings.Settings; +import java.io.Closeable; import java.io.IOException; import java.util.Map; + import static java.util.Collections.emptyMap; -class InternalAwsS3Service extends AbstractComponent implements AwsS3Service { +class S3Service extends AbstractComponent implements Closeable { private volatile Map clientsCache = emptyMap(); private volatile Map clientsSettings = emptyMap(); - InternalAwsS3Service(Settings settings) { + S3Service(Settings settings) { super(settings); } @@ -55,7 +56,6 @@ class InternalAwsS3Service extends AbstractComponent implements AwsS3Service { * clients are usable until released. On release they will be destroyed instead * to being returned to the cache. */ - @Override public synchronized Map refreshAndClearCache(Map clientsSettings) { // shutdown all unused clients // others will shutdown on their respective release @@ -71,7 +71,6 @@ public synchronized Map refreshAndClearCache(Map creating s3 repository with bucket[{}] and path [{}]", internalCluster().getInstance(Settings.class).get("repositories.s3.bucket"), basePath); - PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") - .setType("s3").setSettings(settings - ).get(); - assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); - - createIndex("test-idx-1", "test-idx-2", "test-idx-3"); - ensureGreen(); - - logger.info("--> indexing some data"); - for (int i = 0; i < 100; i++) { - index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i); - index("test-idx-2", "doc", Integer.toString(i), "foo", "baz" + i); - index("test-idx-3", "doc", Integer.toString(i), "foo", "baz" + i); - } - refresh(); - assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), equalTo(100L)); - assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().getTotalHits(), equalTo(100L)); - assertThat(client.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), equalTo(100L)); - - logger.info("--> snapshot"); - CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-3").get(); - assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); - assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); - - assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS)); - - logger.info("--> delete some data"); - for (int i = 0; i < 50; i++) { - client.prepareDelete("test-idx-1", "doc", Integer.toString(i)).get(); - } - for (int i = 50; i < 100; i++) { - client.prepareDelete("test-idx-2", "doc", Integer.toString(i)).get(); - } - for (int i = 0; i < 100; i += 2) { - client.prepareDelete("test-idx-3", "doc", Integer.toString(i)).get(); - } - refresh(); - assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), equalTo(50L)); - assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().getTotalHits(), equalTo(50L)); - assertThat(client.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), equalTo(50L)); - - logger.info("--> close indices"); - client.admin().indices().prepareClose("test-idx-1", "test-idx-2").get(); - - logger.info("--> restore all indices from the snapshot"); - RestoreSnapshotResponse 
restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet(); - assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); - - ensureGreen(); - assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), equalTo(100L)); - assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().getTotalHits(), equalTo(100L)); - assertThat(client.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), equalTo(50L)); - - // Test restore after index deletion - logger.info("--> delete indices"); - cluster().wipeIndices("test-idx-1", "test-idx-2"); - logger.info("--> restore one index after deletion"); - restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-2").execute().actionGet(); - assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); - ensureGreen(); - assertThat(client.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), equalTo(100L)); - ClusterState clusterState = client.admin().cluster().prepareState().get().getState(); - assertThat(clusterState.getMetaData().hasIndex("test-idx-1"), equalTo(true)); - assertThat(clusterState.getMetaData().hasIndex("test-idx-2"), equalTo(false)); - } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch-cloud-aws/issues/211") public void testEncryption() { Client client = client(); @@ -179,7 +103,7 @@ public void testEncryption() { Settings settings = internalCluster().getInstance(Settings.class); Settings bucket = settings.getByPrefix("repositories.s3."); - try (AmazonS3Reference s3Client = internalCluster().getInstance(AwsS3Service.class).client("default")) { + try (AmazonS3Reference s3Client = internalCluster().getInstance(S3Service.class).client("default")) { String bucketName = bucket.get("bucket"); logger.info("--> verify encryption for bucket [{}], prefix [{}]", bucketName, basePath); List summaries = s3Client.client().listObjects(bucketName, basePath).getObjectSummaries(); @@ -442,7 +366,7 @@ public void cleanRepositoryFiles(String basePath) { // We check that settings has been set in elasticsearch.yml integration test file // as described in README assertThat("Your settings in elasticsearch.yml are incorrect. 
Check README file.", bucketName, notNullValue()); - try (AmazonS3Reference s3Client = internalCluster().getInstance(AwsS3Service.class).client("default")) { + try (AmazonS3Reference s3Client = internalCluster().getInstance(S3Service.class).client("default")) { ObjectListing prevListing = null; //From http://docs.amazonwebservices.com/AmazonS3/latest/dev/DeletingMultipleObjectsUsingJava.html //we can do at most 1K objects per delete diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java index 6f55f3ed345df..0c14f44d8b613 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java @@ -40,8 +40,8 @@ public class AwsS3ServiceImplTests extends ESTestCase { public void testAWSCredentialsDefaultToInstanceProviders() { final String inexistentClientName = randomAlphaOfLength(8).toLowerCase(Locale.ROOT); final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(Settings.EMPTY, inexistentClientName); - final AWSCredentialsProvider credentialsProvider = InternalAwsS3Service.buildCredentials(logger, clientSettings); - assertThat(credentialsProvider, instanceOf(InternalAwsS3Service.PrivilegedInstanceProfileCredentialsProvider.class)); + final AWSCredentialsProvider credentialsProvider = S3Service.buildCredentials(logger, clientSettings); + assertThat(credentialsProvider, instanceOf(S3Service.PrivilegedInstanceProfileCredentialsProvider.class)); } public void testAWSCredentialsFromKeystore() { @@ -60,15 +60,15 @@ public void testAWSCredentialsFromKeystore() { for (int i = 0; i < clientsCount; i++) { final String clientName = clientNamePrefix + i; final S3ClientSettings someClientSettings = allClientsSettings.get(clientName); - final AWSCredentialsProvider credentialsProvider = InternalAwsS3Service.buildCredentials(logger, someClientSettings); + final AWSCredentialsProvider credentialsProvider = S3Service.buildCredentials(logger, someClientSettings); assertThat(credentialsProvider, instanceOf(StaticCredentialsProvider.class)); assertThat(credentialsProvider.getCredentials().getAWSAccessKeyId(), is(clientName + "_aws_access_key")); assertThat(credentialsProvider.getCredentials().getAWSSecretKey(), is(clientName + "_aws_secret_key")); } // test default exists and is an Instance provider final S3ClientSettings defaultClientSettings = allClientsSettings.get("default"); - final AWSCredentialsProvider defaultCredentialsProvider = InternalAwsS3Service.buildCredentials(logger, defaultClientSettings); - assertThat(defaultCredentialsProvider, instanceOf(InternalAwsS3Service.PrivilegedInstanceProfileCredentialsProvider.class)); + final AWSCredentialsProvider defaultCredentialsProvider = S3Service.buildCredentials(logger, defaultClientSettings); + assertThat(defaultCredentialsProvider, instanceOf(S3Service.PrivilegedInstanceProfileCredentialsProvider.class)); } public void testSetDefaultCredential() { @@ -82,7 +82,7 @@ public void testSetDefaultCredential() { assertThat(allClientsSettings.size(), is(1)); // test default exists and is an Instance provider final S3ClientSettings defaultClientSettings = allClientsSettings.get("default"); - final AWSCredentialsProvider defaultCredentialsProvider = InternalAwsS3Service.buildCredentials(logger, defaultClientSettings); + final AWSCredentialsProvider 
defaultCredentialsProvider = S3Service.buildCredentials(logger, defaultClientSettings); assertThat(defaultCredentialsProvider, instanceOf(StaticCredentialsProvider.class)); assertThat(defaultCredentialsProvider.getCredentials().getAWSAccessKeyId(), is(awsAccessKey)); assertThat(defaultCredentialsProvider.getCredentials().getAWSSecretKey(), is(awsSecretKey)); @@ -152,7 +152,7 @@ private void launchAWSConfigurationTest(Settings settings, int expectedReadTimeout) { final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default"); - final ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(clientSettings); + final ClientConfiguration configuration = S3Service.buildConfiguration(clientSettings); assertThat(configuration.getResponseMetadataCacheSize(), is(0)); assertThat(configuration.getProtocol(), is(expectedProtocol)); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index f3bd894977999..744a27dc48e32 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -63,9 +63,9 @@ public boolean doesBucketExist(String bucketName) { } } - static final class ProxyInternalAwsS3Service extends InternalAwsS3Service { + static final class ProxyS3Service extends S3Service { - ProxyInternalAwsS3Service(Settings settings) { + ProxyS3Service(Settings settings) { super(settings); } @@ -77,15 +77,9 @@ AmazonS3 buildClient(AWSCredentialsProvider credentials, ClientConfiguration con } - protected ProxyS3RepositoryPlugin(Settings settings) { - super(settings); + ProxyS3RepositoryPlugin(Settings settings) { + super(settings, new ProxyS3Service(settings)); } - - @Override - protected AwsS3Service getAwsS3Service(Settings settings) { - return new ProxyInternalAwsS3Service(settings); - } - } public void testRepositoryCredentialsOverrideSecureCredentials() throws IOException { @@ -108,7 +102,7 @@ public void testRepositoryCredentialsOverrideSecureCredentials() throws IOExcept .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "insecure_aws_key") .put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret").build()); try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings); - S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); + S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); @@ -131,7 +125,7 @@ public void testRepositoryCredentialsOnly() throws IOException { .put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret") .build()); try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(Settings.EMPTY); - S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); + S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { final AWSCredentials credentials 
= ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); @@ -162,7 +156,7 @@ public void testReinitSecureCredentials() throws IOException { } final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", builder.build()); try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings); - S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY)) { + S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY)) { try (AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials .getCredentials(); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 2843390f1aa80..b061e8e45edee 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -110,14 +110,14 @@ public TestS3RepositoryPlugin(final Settings settings) { @Override public Map getRepositories(final Environment env, final NamedXContentRegistry registry) { return Collections.singletonMap(S3Repository.TYPE, - (metadata) -> new S3Repository(metadata, env.settings(), registry, new InternalAwsS3Service(env.settings()) { + (metadata) -> new S3Repository(metadata, env.settings(), registry, new S3Service(env.settings()) { @Override public synchronized AmazonS3Reference client(String clientName) { return new AmazonS3Reference(new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass)); } }) { @Override - void overrideCredentialsFromClusterState(AwsS3Service awsService) { + void overrideCredentialsFromClusterState(S3Service awsService) { } }); } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java index a44946b6b3ffa..55df03ff34a3f 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java @@ -117,7 +117,7 @@ public static S3BlobStore randomMockS3BlobStore() { final String theClientName = randomAlphaOfLength(4); final AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass); - final AwsS3Service service = new InternalAwsS3Service(Settings.EMPTY) { + final S3Service service = new S3Service(Settings.EMPTY) { @Override public synchronized AmazonS3Reference client(String clientName) { assert theClientName.equals(clientName); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index 5c0aada66585c..14f53ae5d3397 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -20,9 +20,7 @@ package 
org.elasticsearch.repositories.s3; import com.amazonaws.services.s3.AbstractAmazonS3; - import org.elasticsearch.cluster.metadata.RepositoryMetaData; -import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -30,6 +28,7 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; + import java.io.IOException; import java.util.Collections; import java.util.Map; @@ -51,16 +50,11 @@ public void shutdown() { } } - private static class DummyS3Service extends AbstractLifecycleComponent implements AwsS3Service { + private static class DummyS3Service extends S3Service { DummyS3Service() { super(Settings.EMPTY); } - @Override - protected void doStart() {} - @Override - protected void doStop() {} - @Override - protected void doClose() {} + @Override public AmazonS3Reference client(String clientName) { return new AmazonS3Reference(new DummyS3Client()); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java index f376f73820624..828d8ef850462 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java @@ -24,10 +24,10 @@ import com.amazonaws.services.s3.AmazonS3; import org.elasticsearch.common.settings.Settings; -public class TestAwsS3Service extends InternalAwsS3Service { +public class TestAwsS3Service extends S3Service { public static class TestPlugin extends S3RepositoryPlugin { public TestPlugin(Settings settings) { - super(new TestAwsS3Service(settings)); + super(settings, new TestAwsS3Service(settings)); } } diff --git a/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java similarity index 97% rename from qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java rename to qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index 2d3f55ab94bb4..49a9dec870e75 100644 --- a/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.bwc; +package org.elasticsearch.upgrades; import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; @@ -153,6 +153,21 @@ protected boolean preserveIndicesUponCompletion() { return true; } + @Override + protected boolean preserveSnapshotsUponCompletion() { + return true; + } + + @Override + protected boolean preserveReposUponCompletion() { + return true; + } + + @Override + protected boolean preserveTemplatesUponCompletion() { + return true; + } + public void testQueryBuilderBWC() throws Exception { String index = "queries"; if (runningAgainstOldCluster) { diff --git a/qa/query-builder-bwc/build.gradle b/qa/query-builder-bwc/build.gradle deleted file mode 100644 index c0144041e1a46..0000000000000 --- a/qa/query-builder-bwc/build.gradle +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.elasticsearch.gradle.test.RestIntegTestTask -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.VersionProperties - -apply plugin: 'elasticsearch.standalone-test' - -// This is a top level task which we will add dependencies to below. -// It is a single task that can be used to backcompat tests against all versions. -task bwcTest { - description = 'Runs backwards compatibility tests.' - group = 'verification' -} - -for (Version version : bwcVersions.indexCompatible) { - String baseName = "v${version}" - - Task oldQueryBuilderTest = tasks.create(name: "${baseName}#oldQueryBuilderTest", type: RestIntegTestTask) { - mustRunAfter(precommit) - } - tasks.getByName("${baseName}#oldQueryBuilderTestRunner").configure { - systemProperty 'tests.is_old_cluster', 'true' - systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT") - } - - configure(extensions.findByName("${baseName}#oldQueryBuilderTestCluster")) { - bwcVersion = version - numBwcNodes = 1 - numNodes = 1 - clusterName = 'query_builder_bwc' - setting 'http.content_type.required', 'true' - } - - Task upgradedQueryBuilderTest = tasks.create(name: "${baseName}#upgradedQueryBuilderTest", type: RestIntegTestTask) { - dependsOn(oldQueryBuilderTest, "${baseName}#oldQueryBuilderTestCluster#stop") - } - - configure(extensions.findByName("${baseName}#upgradedQueryBuilderTestCluster")) { - dependsOn oldQueryBuilderTest, - "${baseName}#oldQueryBuilderTestCluster#stop" - clusterName = 'query_builder_bwc' - numNodes = 1 - dataDir = { nodeNum -> oldQueryBuilderTest.nodes[nodeNum].dataDir } - cleanShared = false // We want to keep snapshots made by the old cluster! 
- } - - tasks.getByName("${baseName}#upgradedQueryBuilderTestRunner").configure { - systemProperty 'tests.is_old_cluster', 'false' - systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT") - } - - Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") { - dependsOn = [upgradedQueryBuilderTest] - } - - if (project.bwc_tests_enabled) { - bwcTest.dependsOn(versionBwcTest) - } -} - -test.enabled = false // no unit tests for rolling upgrades, only the rest integration test - -// basic integ tests includes testing bwc against the most recent version -task integTest { - if (project.bwc_tests_enabled) { - final def version = bwcVersions.snapshotsIndexCompatible.first() - dependsOn "v${version}#bwcTest" - } -} - -check.dependsOn(integTest) diff --git a/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats b/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats index 514091409334e..b20e756e701f4 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/70_sysv_initd.bats @@ -178,8 +178,7 @@ setup() { } # Ensures that if $MAX_MAP_COUNT is greater than the set vaule on the OS -# we do not attempt to update it this should cover equality as well as I think -# we can trust that equality operators work as intended. +# we do not attempt to update it. @test "[INIT.D] sysctl is not run when it already has a larger or equal value set" { # intentionally set to the default +1 sysctl -q -w vm.max_map_count=262145 diff --git a/server/build.gradle b/server/build.gradle index 7e880e0dae4d2..3055c625ea914 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -332,7 +332,7 @@ if (isEclipse == false || project.path == ":server-tests") { dependsOn: test.dependsOn) { configure(BuildPlugin.commonTestConfig(project)) classpath = project.test.classpath - testClassesDir = project.test.testClassesDir + testClassesDirs = project.test.testClassesDirs include '**/*IT.class' } check.dependsOn integTest diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 5d5f4685f03d2..2ff01ab01ed1f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -28,14 +28,17 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.common.Strings.EMPTY_ARRAY; @@ -58,7 +61,8 @@ *
 * <li>must not contain invalid file name characters {@link org.elasticsearch.common.Strings#INVALID_FILENAME_CHARS}</li>
  • * */ -public class CreateSnapshotRequest extends MasterNodeRequest implements IndicesRequest.Replaceable { +public class CreateSnapshotRequest extends MasterNodeRequest + implements IndicesRequest.Replaceable, ToXContentObject { private String snapshot; @@ -407,6 +411,34 @@ public CreateSnapshotRequest source(Map source) { return this; } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("repository", repository); + builder.field("snapshot", snapshot); + builder.startArray("indices"); + for (String index : indices) { + builder.value(index); + } + builder.endArray(); + builder.field("partial", partial); + if (settings != null) { + builder.startObject("settings"); + if (settings.isEmpty() == false) { + settings.toXContent(builder, params); + } + builder.endObject(); + } + builder.field("include_global_state", includeGlobalState); + if (indicesOptions != null) { + indicesOptions.toXContent(builder, params); + } + builder.field("wait_for_completion", waitForCompletion); + builder.field("master_node_timeout", masterNodeTimeout.toString()); + builder.endObject(); + return builder; + } + @Override public void readFrom(StreamInput in) throws IOException { throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); @@ -416,4 +448,42 @@ public void readFrom(StreamInput in) throws IOException { public String getDescription() { return "snapshot [" + repository + ":" + snapshot + "]"; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSnapshotRequest that = (CreateSnapshotRequest) o; + return partial == that.partial && + includeGlobalState == that.includeGlobalState && + waitForCompletion == that.waitForCompletion && + Objects.equals(snapshot, that.snapshot) && + Objects.equals(repository, that.repository) && + Arrays.equals(indices, that.indices) && + Objects.equals(indicesOptions, that.indicesOptions) && + Objects.equals(settings, that.settings) && + Objects.equals(masterNodeTimeout, that.masterNodeTimeout); + } + + @Override + public int hashCode() { + int result = Objects.hash(snapshot, repository, indicesOptions, partial, settings, includeGlobalState, waitForCompletion); + result = 31 * result + Arrays.hashCode(indices); + return result; + } + + @Override + public String toString() { + return "CreateSnapshotRequest{" + + "snapshot='" + snapshot + '\'' + + ", repository='" + repository + '\'' + + ", indices=" + (indices == null ? 
null : Arrays.asList(indices)) + + ", indicesOptions=" + indicesOptions + + ", partial=" + partial + + ", settings=" + settings + + ", includeGlobalState=" + includeGlobalState + + ", waitForCompletion=" + waitForCompletion + + ", masterNodeTimeout=" + masterNodeTimeout + + '}'; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java index 1f9f77f9ed3df..a2dc02c5c8299 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java @@ -25,10 +25,13 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.SnapshotInfo; import java.io.IOException; +import java.util.Objects; /** * Create snapshot response @@ -45,6 +48,10 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent CreateSnapshotResponse() { } + void setSnapshotInfo(SnapshotInfo snapshotInfo) { + this.snapshotInfo = snapshotInfo; + } + /** * Returns snapshot information if snapshot was completed by the time this method returned or null otherwise. * @@ -93,4 +100,58 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static CreateSnapshotResponse fromXContent(XContentParser parser) throws IOException { + CreateSnapshotResponse createSnapshotResponse = new CreateSnapshotResponse(); + + parser.nextToken(); // move to '{' + + if (parser.currentToken() != Token.START_OBJECT) { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['{']"); + } + + parser.nextToken(); // move to 'snapshot' || 'accepted' + + if ("snapshot".equals(parser.currentName())) { + createSnapshotResponse.snapshotInfo = SnapshotInfo.fromXContent(parser); + } else if ("accepted".equals(parser.currentName())) { + parser.nextToken(); // move to 'accepted' field value + + if (parser.booleanValue()) { + // ensure accepted is a boolean value + } + + parser.nextToken(); // move past 'true'/'false' + } else { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "] expected ['snapshot', 'accepted']"); + } + + if (parser.currentToken() != Token.END_OBJECT) { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['}']"); + } + + parser.nextToken(); // move past '}' + + return createSnapshotResponse; + } + + @Override + public String toString() { + return "CreateSnapshotResponse{" + + "snapshotInfo=" + snapshotInfo + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSnapshotResponse that = (CreateSnapshotResponse) o; + return Objects.equals(snapshotInfo, that.snapshotInfo); + } + + @Override + public int hashCode() { + return Objects.hash(snapshotInfo); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java index 0d1e5eda7f2d2..6f757cb60ca86 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java @@ -20,23 +20,37 @@ package org.elasticsearch.action.admin.cluster.snapshots.get; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.snapshots.SnapshotInfo; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; /** * Get snapshots response */ public class GetSnapshotsResponse extends ActionResponse implements ToXContentObject { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser GET_SNAPSHOT_PARSER = + new ConstructingObjectParser<>(GetSnapshotsResponse.class.getName(), true, + (args) -> new GetSnapshotsResponse((List) args[0])); + + static { + GET_SNAPSHOT_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), + (p, c) -> SnapshotInfo.SNAPSHOT_INFO_PARSER.apply(p, c).build(), new ParseField("snapshots")); + } + private List snapshots = Collections.emptyList(); GetSnapshotsResponse() { @@ -87,4 +101,20 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par return builder; } + public static GetSnapshotsResponse fromXContent(XContentParser parser) throws IOException { + return GET_SNAPSHOT_PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSnapshotsResponse that = (GetSnapshotsResponse) o; + return Objects.equals(snapshots, that.snapshots); + } + + @Override + public int hashCode() { + return Objects.hash(snapshots); + } } diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java index 5d8ca27657f92..6fdf355c0670c 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java @@ -22,9 +22,12 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.internal.AliasFilter; @@ -34,7 +37,9 @@ /** * Explain request encapsulating the explain query and document identifier to get an explanation for. 
*/ -public class ExplainRequest extends SingleShardRequest { +public class ExplainRequest extends SingleShardRequest implements ToXContentObject { + + private static final ParseField QUERY_FIELD = new ParseField("query"); private String type = "_all"; private String id; @@ -186,4 +191,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(fetchSourceContext); out.writeVLong(nowInMillis); } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(QUERY_FIELD.getPreferredName(), query); + builder.endObject(); + return builder; + } } diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java index fb1fc3db1ea18..0dc75e41439d2 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java @@ -21,11 +21,19 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.StatusToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.Collection; +import java.util.Objects; import static org.elasticsearch.common.lucene.Lucene.readExplanation; import static org.elasticsearch.common.lucene.Lucene.writeExplanation; @@ -33,7 +41,17 @@ /** * Response containing the score explanation. */ -public class ExplainResponse extends ActionResponse { +public class ExplainResponse extends ActionResponse implements StatusToXContentObject { + + private static final ParseField _INDEX = new ParseField("_index"); + private static final ParseField _TYPE = new ParseField("_type"); + private static final ParseField _ID = new ParseField("_id"); + private static final ParseField MATCHED = new ParseField("matched"); + private static final ParseField EXPLANATION = new ParseField("explanation"); + private static final ParseField VALUE = new ParseField("value"); + private static final ParseField DESCRIPTION = new ParseField("description"); + private static final ParseField DETAILS = new ParseField("details"); + private static final ParseField GET = new ParseField("get"); private String index; private String type; @@ -94,6 +112,11 @@ public GetResult getGetResult() { return getResult; } + @Override + public RestStatus status() { + return exists ? 
RestStatus.OK : RestStatus.NOT_FOUND; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -129,4 +152,90 @@ public void writeTo(StreamOutput out) throws IOException { getResult.writeTo(out); } } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("explain", true, + (arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], (String) arg[2], exists, (Explanation) arg[3], + (GetResult) arg[4])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), _INDEX); + PARSER.declareString(ConstructingObjectParser.constructorArg(), _TYPE); + PARSER.declareString(ConstructingObjectParser.constructorArg(), _ID); + final ConstructingObjectParser explanationParser = new ConstructingObjectParser<>("explanation", true, + arg -> { + if ((float) arg[0] > 0) { + return Explanation.match((float) arg[0], (String) arg[1], (Collection) arg[2]); + } else { + return Explanation.noMatch((String) arg[1], (Collection) arg[2]); + } + }); + explanationParser.declareFloat(ConstructingObjectParser.constructorArg(), VALUE); + explanationParser.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); + explanationParser.declareObjectArray(ConstructingObjectParser.constructorArg(), explanationParser, DETAILS); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), explanationParser, EXPLANATION); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> GetResult.fromXContentEmbedded(p), GET); + } + + public static ExplainResponse fromXContent(XContentParser parser, boolean exists) { + return PARSER.apply(parser, exists); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(_INDEX.getPreferredName(), index); + builder.field(_TYPE.getPreferredName(), type); + builder.field(_ID.getPreferredName(), id); + builder.field(MATCHED.getPreferredName(), isMatch()); + if (hasExplanation()) { + builder.startObject(EXPLANATION.getPreferredName()); + buildExplanation(builder, explanation); + builder.endObject(); + } + if (getResult != null) { + builder.startObject(GET.getPreferredName()); + getResult.toXContentEmbedded(builder, params); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException { + builder.field(VALUE.getPreferredName(), explanation.getValue()); + builder.field(DESCRIPTION.getPreferredName(), explanation.getDescription()); + Explanation[] innerExps = explanation.getDetails(); + if (innerExps != null) { + builder.startArray(DETAILS.getPreferredName()); + for (Explanation exp : innerExps) { + builder.startObject(); + buildExplanation(builder, exp); + builder.endObject(); + } + builder.endArray(); + } + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ExplainResponse other = (ExplainResponse) obj; + return index.equals(other.index) + && type.equals(other.type) + && id.equals(other.id) + && Objects.equals(explanation, other.explanation) + && getResult.isExists() == other.getResult.isExists() + && Objects.equals(getResult.sourceAsMap(), other.getResult.sourceAsMap()) + && Objects.equals(getResult.getFields(), other.getResult.getFields()); + } + + @Override + public int hashCode() { + return Objects.hash(index, type, id, 
explanation, getResult.isExists(), getResult.sourceAsMap(), getResult.getFields()); + } } diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java index c7c711253baeb..056c4c29c7a3d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java @@ -270,42 +270,7 @@ public static byte[] writeMultiLineFormat(MultiSearchRequest multiSearchRequest, ByteArrayOutputStream output = new ByteArrayOutputStream(); for (SearchRequest request : multiSearchRequest.requests()) { try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) { - xContentBuilder.startObject(); - if (request.indices() != null) { - xContentBuilder.field("index", request.indices()); - } - if (request.indicesOptions() != null && request.indicesOptions() != SearchRequest.DEFAULT_INDICES_OPTIONS) { - if (request.indicesOptions().expandWildcardsOpen() && request.indicesOptions().expandWildcardsClosed()) { - xContentBuilder.field("expand_wildcards", "all"); - } else if (request.indicesOptions().expandWildcardsOpen()) { - xContentBuilder.field("expand_wildcards", "open"); - } else if (request.indicesOptions().expandWildcardsClosed()) { - xContentBuilder.field("expand_wildcards", "closed"); - } else { - xContentBuilder.field("expand_wildcards", "none"); - } - xContentBuilder.field("ignore_unavailable", request.indicesOptions().ignoreUnavailable()); - xContentBuilder.field("allow_no_indices", request.indicesOptions().allowNoIndices()); - } - if (request.types() != null) { - xContentBuilder.field("types", request.types()); - } - if (request.searchType() != null) { - xContentBuilder.field("search_type", request.searchType().name().toLowerCase(Locale.ROOT)); - } - if (request.requestCache() != null) { - xContentBuilder.field("request_cache", request.requestCache()); - } - if (request.preference() != null) { - xContentBuilder.field("preference", request.preference()); - } - if (request.routing() != null) { - xContentBuilder.field("routing", request.routing()); - } - if (request.allowPartialSearchResults() != null) { - xContentBuilder.field("allow_partial_search_results", request.allowPartialSearchResults()); - } - xContentBuilder.endObject(); + writeSearchRequestParams(request, xContentBuilder); BytesReference.bytes(xContentBuilder).writeTo(output); } output.write(xContent.streamSeparator()); @@ -322,5 +287,44 @@ public static byte[] writeMultiLineFormat(MultiSearchRequest multiSearchRequest, } return output.toByteArray(); } + + public static void writeSearchRequestParams(SearchRequest request, XContentBuilder xContentBuilder) throws IOException { + xContentBuilder.startObject(); + if (request.indices() != null) { + xContentBuilder.field("index", request.indices()); + } + if (request.indicesOptions() != null && request.indicesOptions() != SearchRequest.DEFAULT_INDICES_OPTIONS) { + if (request.indicesOptions().expandWildcardsOpen() && request.indicesOptions().expandWildcardsClosed()) { + xContentBuilder.field("expand_wildcards", "all"); + } else if (request.indicesOptions().expandWildcardsOpen()) { + xContentBuilder.field("expand_wildcards", "open"); + } else if (request.indicesOptions().expandWildcardsClosed()) { + xContentBuilder.field("expand_wildcards", "closed"); + } else { + xContentBuilder.field("expand_wildcards", "none"); + } + xContentBuilder.field("ignore_unavailable", 
request.indicesOptions().ignoreUnavailable()); + xContentBuilder.field("allow_no_indices", request.indicesOptions().allowNoIndices()); + } + if (request.types() != null) { + xContentBuilder.field("types", request.types()); + } + if (request.searchType() != null) { + xContentBuilder.field("search_type", request.searchType().name().toLowerCase(Locale.ROOT)); + } + if (request.requestCache() != null) { + xContentBuilder.field("request_cache", request.requestCache()); + } + if (request.preference() != null) { + xContentBuilder.field("preference", request.preference()); + } + if (request.routing() != null) { + xContentBuilder.field("routing", request.routing()); + } + if (request.allowPartialSearchResults() != null) { + xContentBuilder.field("allow_partial_search_results", request.allowPartialSearchResults()); + } + xContentBuilder.endObject(); + } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index a390538ec2978..e67517c4852b8 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -60,6 +60,7 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest private static final ToXContent.Params FORMAT_PARAMS = new ToXContent.MapParams(Collections.singletonMap("pretty", "false")); public static final int DEFAULT_PRE_FILTER_SHARD_SIZE = 128; + public static final int DEFAULT_BATCHED_REDUCE_SIZE = 512; private SearchType searchType = SearchType.DEFAULT; @@ -79,7 +80,7 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest private Scroll scroll; - private int batchedReduceSize = 512; + private int batchedReduceSize = DEFAULT_BATCHED_REDUCE_SIZE; private int maxConcurrentShardRequests = 0; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 9ad8a20cb1770..2a97798764e59 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -422,7 +422,7 @@ public static class Clusters implements ToXContent, Writeable { private final int successful; private final int skipped; - Clusters(int total, int successful, int skipped) { + public Clusters(int total, int successful, int skipped) { assert total >= 0 && successful >= 0 && skipped >= 0 : "total: " + total + " successful: " + successful + " skipped: " + skipped; assert successful <= total && skipped == total - successful diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchType.java b/server/src/main/java/org/elasticsearch/action/search/SearchType.java index b800120408739..910bc3d676a05 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchType.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchType.java @@ -50,6 +50,11 @@ public enum SearchType { */ public static final SearchType DEFAULT = QUERY_THEN_FETCH; + /** + * Non-deprecated types + */ + public static final SearchType [] CURRENTLY_SUPPORTED = {QUERY_THEN_FETCH, DFS_QUERY_THEN_FETCH}; + private byte id; SearchType(byte id) { @@ -94,4 +99,5 @@ public static SearchType fromString(String searchType) { throw new IllegalArgumentException("No search type for [" + searchType + "]"); } } + } diff --git 
a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index b284ec87dd42c..93641574bde12 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -22,12 +22,15 @@ import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestRequest; import java.io.IOException; import java.util.Collection; import java.util.EnumSet; import java.util.HashSet; +import java.util.Locale; import java.util.Map; import java.util.Set; @@ -38,7 +41,7 @@ * Controls how to deal with unavailable concrete indices (closed or missing), how wildcard expressions are expanded * to actual indices (all, closed or open indices) and how to deal with wildcard expressions that resolve to no indices. */ -public class IndicesOptions { +public class IndicesOptions implements ToXContentFragment { public enum WildcardStates { OPEN, @@ -313,6 +316,18 @@ public static IndicesOptions fromMap(Map map, IndicesOptions def defaultSettings); } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray("expand_wildcards"); + for (WildcardStates expandWildcard : expandWildcards) { + builder.value(expandWildcard.toString().toLowerCase(Locale.ROOT)); + } + builder.endArray(); + builder.field("ignore_unavailable", ignoreUnavailable()); + builder.field("allow_no_indices", allowNoIndices()); + return builder; + } + /** * Returns true if the name represents a valid name for one of the indices option * false otherwise diff --git a/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index e048512e6382c..237b36b53d4bc 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -29,19 +29,17 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.OutputStreamIndexOutput; import org.apache.lucene.store.SimpleFSDirectory; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.internal.io.IOUtils; import java.io.FileNotFoundException; import java.io.IOException; @@ -54,7 +52,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; import 
java.util.stream.Collectors; @@ -70,9 +67,8 @@ public abstract class MetaDataStateFormat { public static final String STATE_FILE_EXTENSION = ".st"; private static final String STATE_FILE_CODEC = "state"; - private static final int MIN_COMPATIBLE_STATE_FILE_VERSION = 0; + private static final int MIN_COMPATIBLE_STATE_FILE_VERSION = 1; private static final int STATE_FILE_VERSION = 1; - private static final int STATE_FILE_VERSION_ES_2X_AND_BELOW = 0; private static final int BUFFER_SIZE = 4096; private final String prefix; private final Pattern stateFilePattern; @@ -186,16 +182,11 @@ public final T read(NamedXContentRegistry namedXContentRegistry, Path file) thro try (IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) { // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here. CodecUtil.checksumEntireFile(indexInput); - final int fileVersion = CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, - STATE_FILE_VERSION); + CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, STATE_FILE_VERSION); final XContentType xContentType = XContentType.values()[indexInput.readInt()]; if (xContentType != FORMAT) { throw new IllegalStateException("expected state in " + file + " to be " + FORMAT + " format but was " + xContentType); } - if (fileVersion == STATE_FILE_VERSION_ES_2X_AND_BELOW) { - // format version 0, wrote a version that always came from the content state file and was never used - indexInput.readLong(); // version currently unused - } long filePointer = indexInput.getFilePointer(); long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer; try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) { @@ -263,10 +254,9 @@ long findMaxStateId(final String prefix, Path... locations) throws IOException { * @param dataLocations the data-locations to try. * @return the latest state or null if no state was found. */ - public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegistry, Path... dataLocations) throws IOException { + public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegistry, Path... dataLocations) throws IOException { List files = new ArrayList<>(); long maxStateId = -1; - boolean maxStateIdIsLegacy = true; if (dataLocations != null) { // select all eligible files first for (Path dataLocation : dataLocations) { final Path stateDir = dataLocation.resolve(STATE_DIR_NAME); @@ -280,9 +270,7 @@ public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegi if (matcher.matches()) { final long stateId = Long.parseLong(matcher.group(1)); maxStateId = Math.max(maxStateId, stateId); - final boolean legacy = MetaDataStateFormat.STATE_FILE_EXTENSION.equals(matcher.group(2)) == false; - maxStateIdIsLegacy &= legacy; // on purpose, see NOTE below - PathAndStateId pav = new PathAndStateId(stateFile, stateId, legacy); + PathAndStateId pav = new PathAndStateId(stateFile, stateId); logger.trace("found state file: {}", pav); files.add(pav); } @@ -292,39 +280,19 @@ public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegi } } } - final List exceptions = new ArrayList<>(); - T state = null; // NOTE: we might have multiple version of the latest state if there are multiple data dirs.. for this case - // we iterate only over the ones with the max version. If we have at least one state file that uses the - // new format (ie. 
legacy == false) then we know that the latest version state ought to use this new format. - // In case the state file with the latest version does not use the new format while older state files do, - // the list below will be empty and loading the state will fail + // we iterate only over the ones with the max version. + long finalMaxStateId = maxStateId; Collection pathAndStateIds = files .stream() - .filter(new StateIdAndLegacyPredicate(maxStateId, maxStateIdIsLegacy)) + .filter(pathAndStateId -> pathAndStateId.id == finalMaxStateId) .collect(Collectors.toCollection(ArrayList::new)); + final List exceptions = new ArrayList<>(); for (PathAndStateId pathAndStateId : pathAndStateIds) { try { - final Path stateFile = pathAndStateId.file; - final long id = pathAndStateId.id; - if (pathAndStateId.legacy) { // read the legacy format -- plain XContent - final byte[] data = Files.readAllBytes(stateFile); - if (data.length == 0) { - logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); - continue; - } - try (XContentParser parser = XContentHelper - .createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, new BytesArray(data))) { - state = fromXContent(parser); - } - if (state == null) { - logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); - } - } else { - state = read(namedXContentRegistry, stateFile); - logger.trace("state id [{}] read from [{}]", id, stateFile.getFileName()); - } + T state = read(namedXContentRegistry, pathAndStateId.file); + logger.trace("state id [{}] read from [{}]", pathAndStateId.id, pathAndStateId.file.getFileName()); return state; } catch (Exception e) { exceptions.add(new IOException("failed to read " + pathAndStateId.toString(), e)); @@ -338,46 +306,24 @@ public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegi // We have some state files but none of them gave us a usable state throw new IllegalStateException("Could not find a state file to recover from among " + files); } - return state; - } - - /** - * Filters out all {@link org.elasticsearch.gateway.MetaDataStateFormat.PathAndStateId} instances with a different id than - * the given one. - */ - private static final class StateIdAndLegacyPredicate implements Predicate { - private final long id; - private final boolean legacy; - - StateIdAndLegacyPredicate(long id, boolean legacy) { - this.id = id; - this.legacy = legacy; - } - - @Override - public boolean test(PathAndStateId input) { - return input.id == id && input.legacy == legacy; - } + return null; } /** - * Internal struct-like class that holds the parsed state id, the file - * and a flag if the file is a legacy state ie. 
pre 1.5 + * Internal struct-like class that holds the parsed state id and the file */ private static class PathAndStateId { final Path file; final long id; - final boolean legacy; - private PathAndStateId(Path file, long id, boolean legacy) { + private PathAndStateId(Path file, long id) { this.file = file; this.id = id; - this.legacy = legacy; } @Override public String toString() { - return "[id:" + id + ", legacy:" + legacy + ", file:" + file.toAbsolutePath() + "]"; + return "[id:" + id + ", file:" + file.toAbsolutePath() + "]"; } } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index b01a1363c1cf6..66d3eef23c7c4 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -257,7 +257,7 @@ protected void doStart() { indexMetaDataFormat = new ChecksumBlobStoreFormat<>(INDEX_METADATA_CODEC, METADATA_NAME_FORMAT, IndexMetaData::fromXContent, namedXContentRegistry, isCompress()); snapshotFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_CODEC, SNAPSHOT_NAME_FORMAT, - SnapshotInfo::fromXContent, namedXContentRegistry, isCompress()); + SnapshotInfo::fromXContentInternal, namedXContentRegistry, isCompress()); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java index b0adc27f447f5..d0196702d07ec 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java @@ -19,30 +19,22 @@ package org.elasticsearch.rest.action.search; -import org.apache.lucene.search.Explanation; import org.elasticsearch.action.explain.ExplainRequest; -import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestActions; -import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.rest.action.RestStatusToXContentListener; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestStatus.NOT_FOUND; -import static org.elasticsearch.rest.RestStatus.OK; /** * Rest action for computing a score explanation for specific documents. 
@@ -89,57 +81,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC explainRequest.fetchSourceContext(FetchSourceContext.parseFromRestRequest(request)); - return channel -> client.explain(explainRequest, new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(ExplainResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - builder.field(Fields._INDEX, response.getIndex()) - .field(Fields._TYPE, response.getType()) - .field(Fields._ID, response.getId()) - .field(Fields.MATCHED, response.isMatch()); - - if (response.hasExplanation()) { - builder.startObject(Fields.EXPLANATION); - buildExplanation(builder, response.getExplanation()); - builder.endObject(); - } - GetResult getResult = response.getGetResult(); - if (getResult != null) { - builder.startObject(Fields.GET); - response.getGetResult().toXContentEmbedded(builder, request); - builder.endObject(); - } - builder.endObject(); - return new BytesRestResponse(response.isExists() ? OK : NOT_FOUND, builder); - } - - private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException { - builder.field(Fields.VALUE, explanation.getValue()); - builder.field(Fields.DESCRIPTION, explanation.getDescription()); - Explanation[] innerExps = explanation.getDetails(); - if (innerExps != null) { - builder.startArray(Fields.DETAILS); - for (Explanation exp : innerExps) { - builder.startObject(); - buildExplanation(builder, exp); - builder.endObject(); - } - builder.endArray(); - } - } - }); - } - - static class Fields { - static final String _INDEX = "_index"; - static final String _TYPE = "_type"; - static final String _ID = "_id"; - static final String MATCHED = "matched"; - static final String EXPLANATION = "explanation"; - static final String VALUE = "value"; - static final String DESCRIPTION = "description"; - static final String DETAILS = "details"; - static final String GET = "get"; - + return channel -> client.explain(explainRequest, new RestStatusToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 34b48fce941f8..8c688cbf4466a 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -497,8 +497,8 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t * This parser outputs a temporary map of the objects needed to create the * SearchHit instead of directly creating the SearchHit. The reason for this * is that this way we can reuse the parser when parsing xContent from - * {@link CompletionSuggestion.Entry.Option} which unfortunately inlines the - * output of + * {@link org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option} which unfortunately inlines + * the output of * {@link #toInnerXContent(XContentBuilder, org.elasticsearch.common.xcontent.ToXContent.Params)} * of the included search hit. 
The output of the map is used to create the * actual SearchHit instance via {@link #createFromMap(Map)} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 2cde321230ebc..ab529ac033e73 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -200,7 +200,7 @@ public void collect(int doc, long bucket) throws IOException { /** * Replay the documents that might contain a top bucket and pass top buckets to - * the {@link this#deferredCollectors}. + * the {@link #deferredCollectors}. */ private void runDeferredCollections() throws IOException { final boolean needsScores = needsScores(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java index b7b29a8841489..38e22296333ae 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java @@ -49,7 +49,7 @@ final class CompositeValuesCollectorQueue implements Releasable { * * @param sources The list of {@link CompositeValuesSourceConfig} to build the composite buckets. * @param size The number of composite buckets to keep. - * @param afterKey + * @param afterKey composite key */ CompositeValuesCollectorQueue(BigArrays bigArrays, SingleDimensionValuesSource[] sources, int size, CompositeKey afterKey) { this.bigArrays = bigArrays; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java index 1718bb4204879..c73820fc32a78 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java @@ -74,7 +74,7 @@ abstract class SingleDimensionValuesSource> implements R * The current value is filled by a {@link LeafBucketCollector} that visits all the * values of each document. This method saves this current value in a slot and should only be used * in the context of a collection. - * See {@link this#getLeafCollector}. + * See {@link #getLeafCollector}. */ abstract void copyCurrent(int slot); @@ -87,7 +87,7 @@ abstract class SingleDimensionValuesSource> implements R * The current value is filled by a {@link LeafBucketCollector} that visits all the * values of each document. This method compares this current value with the value present in * the provided slot and should only be used in the context of a collection. - * See {@link this#getLeafCollector}. + * See {@link #getLeafCollector}. */ abstract int compareCurrent(int slot); @@ -95,7 +95,7 @@ abstract class SingleDimensionValuesSource> implements R * The current value is filled by a {@link LeafBucketCollector} that visits all the * values of each document. 
This method compares this current value with the after value * set on this source and should only be used in the context of a collection. - * See {@link this#getLeafCollector}. + * See {@link #getLeafCollector}. */ abstract int compareCurrentWithAfter(); @@ -120,7 +120,7 @@ T getAfter() { * Creates a {@link LeafBucketCollector} that extracts all values from a document and invokes * {@link LeafBucketCollector#collect} on the provided next collector for each of them. * The current value of this source is set on each call and can be accessed by next via - * the {@link this#copyCurrent(int)} and {@link this#compareCurrent(int)} methods. Note that these methods + * the {@link #copyCurrent(int)} and {@link #compareCurrent(int)} methods. Note that these methods * are only valid when invoked from the {@link LeafBucketCollector} created in this source. */ abstract LeafBucketCollector getLeafCollector(LeafReaderContext context, LeafBucketCollector next) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index 073007f4225df..a1f56a1e47376 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -23,12 +23,14 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -79,6 +81,155 @@ public final class SnapshotInfo implements Comparable, ToXContent, private static final Comparator COMPARATOR = Comparator.comparing(SnapshotInfo::startTime).thenComparing(SnapshotInfo::snapshotId); + public static final class SnapshotInfoBuilder { + private String snapshotName = null; + private String snapshotUUID = null; + private String state = null; + private String reason = null; + private List indices = null; + private long startTime = 0L; + private long endTime = 0L; + private ShardStatsBuilder shardStatsBuilder = null; + private Boolean includeGlobalState = null; + private int version = -1; + private List shardFailures = null; + + private void setSnapshotName(String snapshotName) { + this.snapshotName = snapshotName; + } + + private void setSnapshotUUID(String snapshotUUID) { + this.snapshotUUID = snapshotUUID; + } + + private void setState(String state) { + this.state = state; + } + + private void setReason(String reason) { + this.reason = reason; + } + + private void setIndices(List indices) { + this.indices = indices; + } + + private void setStartTime(long startTime) { + this.startTime = startTime; + } + + private void setEndTime(long endTime) { + this.endTime = endTime; + } + + private void setShardStatsBuilder(ShardStatsBuilder shardStatsBuilder) { + this.shardStatsBuilder = shardStatsBuilder; + } + + private void setIncludeGlobalState(Boolean includeGlobalState) { + this.includeGlobalState 
= includeGlobalState; + } + + private void setVersion(int version) { + this.version = version; + } + + private void setShardFailures(List shardFailures) { + this.shardFailures = shardFailures; + } + + private void ignoreVersion(String version) { + // ignore extra field + } + + private void ignoreStartTime(String startTime) { + // ignore extra field + } + + private void ignoreEndTime(String endTime) { + // ignore extra field + } + + private void ignoreDurationInMillis(long durationInMillis) { + // ignore extra field + } + + public SnapshotInfo build() { + SnapshotId snapshotId = new SnapshotId(snapshotName, snapshotUUID); + + if (indices == null) { + indices = Collections.emptyList(); + } + + SnapshotState snapshotState = state == null ? null : SnapshotState.valueOf(state); + Version version = this.version == -1 ? Version.CURRENT : Version.fromId(this.version); + + int totalShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.getTotalShards(); + int successfulShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.getSuccessfulShards(); + + if (shardFailures == null) { + shardFailures = new ArrayList<>(); + } + + return new SnapshotInfo(snapshotId, indices, snapshotState, reason, version, startTime, endTime, + totalShards, successfulShards, shardFailures, includeGlobalState); + } + } + + private static final class ShardStatsBuilder { + private int totalShards; + private int successfulShards; + + private void setTotalShards(int totalShards) { + this.totalShards = totalShards; + } + + int getTotalShards() { + return totalShards; + } + + private void setSuccessfulShards(int successfulShards) { + this.successfulShards = successfulShards; + } + + int getSuccessfulShards() { + return successfulShards; + } + + private void ignoreFailedShards(int failedShards) { + // ignore extra field + } + } + + public static final ObjectParser SNAPSHOT_INFO_PARSER = + new ObjectParser<>(SnapshotInfoBuilder.class.getName(), true, SnapshotInfoBuilder::new); + + private static final ObjectParser SHARD_STATS_PARSER = + new ObjectParser<>(ShardStatsBuilder.class.getName(), true, ShardStatsBuilder::new); + + static { + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotName, new ParseField(SNAPSHOT)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotUUID, new ParseField(UUID)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setState, new ParseField(STATE)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setReason, new ParseField(REASON)); + SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setIndices, new ParseField(INDICES)); + SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setStartTime, new ParseField(START_TIME_IN_MILLIS)); + SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setEndTime, new ParseField(END_TIME_IN_MILLIS)); + SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setShardStatsBuilder, SHARD_STATS_PARSER, new ParseField(SHARDS)); + SNAPSHOT_INFO_PARSER.declareBoolean(SnapshotInfoBuilder::setIncludeGlobalState, new ParseField(INCLUDE_GLOBAL_STATE)); + SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID)); + SNAPSHOT_INFO_PARSER.declareObjectArray(SnapshotInfoBuilder::setShardFailures, SnapshotShardFailure.SNAPSHOT_SHARD_FAILURE_PARSER, + new ParseField(FAILURES)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreVersion, new ParseField(VERSION)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreStartTime, new ParseField(START_TIME)); + 
SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreEndTime, new ParseField(END_TIME)); + SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::ignoreDurationInMillis, new ParseField(DURATION_IN_MILLIS)); + + SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setTotalShards, new ParseField(TOTAL)); + SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setSuccessfulShards, new ParseField(SUCCESSFUL)); + SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::ignoreFailedShards, new ParseField(FAILED)); + } + private final SnapshotId snapshotId; @Nullable @@ -317,29 +468,21 @@ public int compareTo(final SnapshotInfo o) { return COMPARATOR.compare(this, o); } - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - final SnapshotInfo that = (SnapshotInfo) o; - return startTime == that.startTime && snapshotId.equals(that.snapshotId); - } - - @Override - public int hashCode() { - int result = snapshotId.hashCode(); - result = 31 * result + Long.hashCode(startTime); - return result; - } - @Override public String toString() { - return "SnapshotInfo[snapshotId=" + snapshotId + ", state=" + state + ", indices=" + indices + "]"; + return "SnapshotInfo{" + + "snapshotId=" + snapshotId + + ", state=" + state + + ", reason='" + reason + '\'' + + ", indices=" + indices + + ", startTime=" + startTime + + ", endTime=" + endTime + + ", totalShards=" + totalShards + + ", successfulShards=" + successfulShards + + ", includeGlobalState=" + includeGlobalState + + ", version=" + version + + ", shardFailures=" + shardFailures + + '}'; } /** @@ -360,7 +503,7 @@ public RestStatus status() { public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { // write snapshot info to repository snapshot blob format if (CONTEXT_MODE_SNAPSHOT.equals(params.param(CONTEXT_MODE_PARAM))) { - return toXContentSnapshot(builder, params); + return toXContentInternal(builder, params); } final boolean verbose = params.paramAsBoolean("verbose", GetSnapshotsRequest.DEFAULT_VERBOSE_MODE); @@ -415,7 +558,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa return builder; } - private XContentBuilder toXContentSnapshot(final XContentBuilder builder, final ToXContent.Params params) throws IOException { + private XContentBuilder toXContentInternal(final XContentBuilder builder, final ToXContent.Params params) throws IOException { builder.startObject(SNAPSHOT); builder.field(NAME, snapshotId.getName()); builder.field(UUID, snapshotId.getUUID()); @@ -448,12 +591,20 @@ private XContentBuilder toXContentSnapshot(final XContentBuilder builder, final return builder; } + /** + * This method creates a SnapshotInfo from external x-content. It does not + * handle x-content written with the internal version. + */ + public static SnapshotInfo fromXContent(final XContentParser parser) throws IOException { + return SNAPSHOT_INFO_PARSER.parse(parser, null).build(); + } + /** * This method creates a SnapshotInfo from internal x-content. It does not * handle x-content written with the external version as external x-content * is only for display purposes and does not need to be parsed. 
*/ - public static SnapshotInfo fromXContent(final XContentParser parser) throws IOException { + public static SnapshotInfo fromXContentInternal(final XContentParser parser) throws IOException { String name = null; String uuid = null; Version version = Version.CURRENT; @@ -607,4 +758,28 @@ private static SnapshotState snapshotState(final String reason, final List SNAPSHOT_SHARD_FAILURE_PARSER = + new ConstructingObjectParser<>("shard_failure", true, SnapshotShardFailure::constructSnapshotShardFailure); + + static { + SNAPSHOT_SHARD_FAILURE_PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("index")); + SNAPSHOT_SHARD_FAILURE_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("index_uuid")); + SNAPSHOT_SHARD_FAILURE_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("node_id")); + // Workaround for https://github.com/elastic/elasticsearch/issues/25878 + // Some old snapshot might still have null in shard failure reasons + SNAPSHOT_SHARD_FAILURE_PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), new ParseField("reason")); + SNAPSHOT_SHARD_FAILURE_PARSER.declareInt(ConstructingObjectParser.constructorArg(), new ParseField("shard_id")); + SNAPSHOT_SHARD_FAILURE_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("status")); + } + + private static SnapshotShardFailure constructSnapshotShardFailure(Object[] args) { + String index = (String) args[0]; + String indexUuid = (String) args[1]; + String nodeId = (String) args[2]; + String reason = (String) args[3]; + Integer intShardId = (Integer) args[4]; + String status = (String) args[5]; + if (index == null) { throw new ElasticsearchParseException("index name was not set"); } - if (shardId == -1) { + if (intShardId == null) { throw new ElasticsearchParseException("index shard was not set"); } - snapshotShardFailure.shardId = new ShardId(index, index_uuid, shardId); + + ShardId shardId = new ShardId(index, indexUuid != null ? 
indexUuid : IndexMetaData.INDEX_UUID_NA_VALUE, intShardId); + // Workaround for https://github.com/elastic/elasticsearch/issues/25878 // Some old snapshot might still have null in shard failure reasons - if (snapshotShardFailure.reason == null) { - snapshotShardFailure.reason = ""; + String nonNullReason; + if (reason != null) { + nonNullReason = reason; + } else { + nonNullReason = ""; + } + + + RestStatus restStatus; + if (status != null) { + restStatus = RestStatus.valueOf(status); + } else { + restStatus = RestStatus.INTERNAL_SERVER_ERROR; } - return snapshotShardFailure; + + return new SnapshotShardFailure(nodeId, shardId, nonNullReason, restStatus); + } + + /** + * Deserializes snapshot failure information from JSON + * + * @param parser JSON parser + * @return snapshot failure information + */ + public static SnapshotShardFailure fromXContent(XContentParser parser) throws IOException { + return SNAPSHOT_SHARD_FAILURE_PARSER.parse(parser, null); } @Override @@ -238,4 +260,23 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("status", status.name()); return builder; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SnapshotShardFailure that = (SnapshotShardFailure) o; + // customized to account for discrepancies in shardId/Index toXContent/fromXContent related to uuid + return shardId.id() == that.shardId.id() && + shardId.getIndexName().equals(shardId.getIndexName()) && + Objects.equals(reason, that.reason) && + Objects.equals(nodeId, that.nodeId) && + status.getStatus() == that.status.getStatus(); + } + + @Override + public int hashCode() { + // customized to account for discrepancies in shardId/Index toXContent/fromXContent related to uuid + return Objects.hash(shardId.id(), shardId.getIndexName(), reason, nodeId, status.getStatus()); + } } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index c86ea61980a87..91d7b1b372e51 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -29,6 +29,7 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -369,9 +370,11 @@ void forceConnect() { private void connect(ActionListener connectListener, boolean forceRun) { final boolean runConnect; final Collection> toNotify; + final ActionListener listener = connectListener == null ? 
null : + ContextPreservingActionListener.wrapPreservingContext(connectListener, transportService.getThreadPool().getThreadContext()); synchronized (queue) { - if (connectListener != null && queue.offer(connectListener) == false) { - connectListener.onFailure(new RejectedExecutionException("connect queue is full")); + if (listener != null && queue.offer(listener) == false) { + listener.onFailure(new RejectedExecutionException("connect queue is full")); return; } if (forceRun == false && queue.isEmpty()) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java new file mode 100644 index 0000000000000..9e484217870eb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java @@ -0,0 +1,106 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.snapshots.create; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class CreateSnapshotRequestTests extends ESTestCase { + + // tests creating XContent and parsing with source(Map) equivalency + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31625") + public void testToXContent() throws IOException { + String repo = randomAlphaOfLength(5); + String snap = randomAlphaOfLength(10); + + CreateSnapshotRequest original = new CreateSnapshotRequest(repo, snap); + + if (randomBoolean()) { // replace + List indices = new ArrayList<>(); + int count = randomInt(3) + 1; + + for (int i = 0; i < count; ++i) { + indices.add(randomAlphaOfLength(randomInt(3) + 2)); + } + + original.indices(indices); + } + + if (randomBoolean()) { // replace + original.partial(randomBoolean()); + } + + if (randomBoolean()) { // replace + Map settings = new HashMap<>(); + int count = randomInt(3) + 1; + + for (int i = 0; i < count; ++i) { + settings.put(randomAlphaOfLength(randomInt(3) + 2), randomAlphaOfLength(randomInt(3) + 2)); + } + + } + + if (randomBoolean()) { // replace + original.includeGlobalState(randomBoolean()); + } + + if (randomBoolean()) { // replace + IndicesOptions[] indicesOptions = 
new IndicesOptions[] { + IndicesOptions.STRICT_EXPAND_OPEN, + IndicesOptions.STRICT_EXPAND_OPEN_CLOSED, + IndicesOptions.LENIENT_EXPAND_OPEN, + IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED, + IndicesOptions.STRICT_SINGLE_INDEX_NO_EXPAND_FORBID_CLOSED}; + + original.indicesOptions(randomFrom(indicesOptions)); + } + + if (randomBoolean()) { // replace + original.waitForCompletion(randomBoolean()); + } + + if (randomBoolean()) { // replace + original.masterNodeTimeout("60s"); + } + + XContentBuilder builder = original.toXContent(XContentFactory.jsonBuilder(), null); + XContentParser parser = XContentType.JSON.xContent().createParser( + NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()); + Map<String, Object> map = parser.mapOrdered(); + CreateSnapshotRequest processed = new CreateSnapshotRequest((String)map.get("repository"), (String)map.get("snapshot")); + processed.waitForCompletion((boolean)map.getOrDefault("wait_for_completion", false)); + processed.masterNodeTimeout((String)map.getOrDefault("master_node_timeout", "30s")); + processed.source(map); + + assertEquals(original, processed); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java new file mode 100644 index 0000000000000..bbfc9755bf215 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.action.admin.cluster.snapshots.create; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotShardFailure; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +public class CreateSnapshotResponseTests extends AbstractXContentTestCase<CreateSnapshotResponse> { + + @Override + protected CreateSnapshotResponse doParseInstance(XContentParser parser) throws IOException { + return CreateSnapshotResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected CreateSnapshotResponse createTestInstance() { + SnapshotId snapshotId = new SnapshotId("test", UUID.randomUUID().toString()); + List<String> indices = new ArrayList<>(); + indices.add("test0"); + indices.add("test1"); + String reason = "reason"; + long startTime = System.currentTimeMillis(); + long endTime = startTime + 10000; + int totalShards = randomIntBetween(1, 3); + int successfulShards = randomIntBetween(0, totalShards); + List<SnapshotShardFailure> shardFailures = new ArrayList<>(); + + for (int count = successfulShards; count < totalShards; ++count) { + shardFailures.add(new SnapshotShardFailure( + "node-id", new ShardId("index-" + count, UUID.randomUUID().toString(), randomInt()), "reason")); + } + + boolean globalState = randomBoolean(); + + CreateSnapshotResponse response = new CreateSnapshotResponse(); + response.setSnapshotInfo( + new SnapshotInfo(snapshotId, indices, startTime, reason, endTime, totalShards, shardFailures, globalState)); + return response; + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java new file mode 100644 index 0000000000000..c5bd7d9f38ac1 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponseTests.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.action.admin.cluster.snapshots.get; + +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotShardFailure; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class GetSnapshotsResponseTests extends AbstractStreamableXContentTestCase<GetSnapshotsResponse> { + + @Override + protected GetSnapshotsResponse doParseInstance(XContentParser parser) throws IOException { + return GetSnapshotsResponse.fromXContent(parser); + } + + @Override + protected GetSnapshotsResponse createBlankInstance() { + return new GetSnapshotsResponse(); + } + + @Override + protected GetSnapshotsResponse createTestInstance() { + ArrayList<SnapshotInfo> snapshots = new ArrayList<>(); + for (int i = 0; i < randomIntBetween(5, 10); ++i) { + SnapshotId snapshotId = new SnapshotId("snapshot " + i, UUIDs.base64UUID()); + String reason = randomBoolean() ? null : "reason"; + ShardId shardId = new ShardId("index", UUIDs.base64UUID(), 2); + List<SnapshotShardFailure> shardFailures = Collections.singletonList(new SnapshotShardFailure("node-id", shardId, "reason")); + snapshots.add(new SnapshotInfo(snapshotId, Arrays.asList("indice1", "indice2"), System.currentTimeMillis(), reason, + System.currentTimeMillis(), randomIntBetween(2, 3), shardFailures, randomBoolean())); + + } + return new GetSnapshotsResponse(snapshots); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ExplainRequestTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainRequestTests.java similarity index 97% rename from server/src/test/java/org/elasticsearch/action/ExplainRequestTests.java rename to server/src/test/java/org/elasticsearch/action/explain/ExplainRequestTests.java index 9f68d28b4422b..be636e7d9875f 100644 --- a/server/src/test/java/org/elasticsearch/action/ExplainRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainRequestTests.java @@ -16,9 +16,8 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action; +package org.elasticsearch.action.explain; -import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; diff --git a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java new file mode 100644 index 0000000000000..ca5c35ccab3ed --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.explain; + +import org.apache.lucene.search.Explanation; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.test.RandomObjects; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Predicate; + +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.equalTo; + +public class ExplainResponseTests extends AbstractStreamableXContentTestCase { + @Override + protected ExplainResponse doParseInstance(XContentParser parser) throws IOException { + return ExplainResponse.fromXContent(parser, randomBoolean()); + } + + @Override + protected ExplainResponse createBlankInstance() { + return new ExplainResponse(); + } + + @Override + protected ExplainResponse createTestInstance() { + String index = randomAlphaOfLength(5); + String type = randomAlphaOfLength(5); + String id = String.valueOf(randomIntBetween(1,100)); + boolean exist = randomBoolean(); + Explanation explanation = randomExplanation(randomExplanation(randomExplanation()), randomExplanation()); + String fieldName = randomAlphaOfLength(10); + List values = Arrays.asList(randomAlphaOfLengthBetween(3, 10), randomInt(), randomLong(), randomDouble(), randomBoolean()); + GetResult getResult = new GetResult(randomAlphaOfLengthBetween(3, 10), + randomAlphaOfLengthBetween(3, 10), + randomAlphaOfLengthBetween(3, 10), + randomNonNegativeLong(), + true, + RandomObjects.randomSource(random()), + singletonMap(fieldName, new DocumentField(fieldName, values))); + return new ExplainResponse(index, type, id, exist, explanation, getResult); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> field.equals("get") || field.startsWith("get.fields") || field.startsWith("get._source"); + } + + public void testToXContent() throws IOException { + String index = "index"; + String type = "type"; + String id = "1"; + boolean exist = true; + Explanation explanation = Explanation.match(1.0f, "description", Collections.emptySet()); + GetResult getResult = new GetResult(null, null, null, -1, true, new BytesArray("{ \"field1\" : " + + "\"value1\", \"field2\":\"value2\"}"), singletonMap("field1", new DocumentField("field1", + singletonList("value1")))); + ExplainResponse response = new ExplainResponse(index, type, id, exist, explanation, getResult); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + response.toXContent(builder, ToXContent.EMPTY_PARAMS); 
+ + String generatedResponse = BytesReference.bytes(builder).utf8ToString().replaceAll("\\s+", ""); + + String expectedResponse = + ("{\n" + + " \"_index\":\"index\",\n" + + " \"_type\":\"type\",\n" + + " \"_id\":\"1\",\n" + + " \"matched\":true,\n" + + " \"explanation\":{\n" + + " \"value\":1.0,\n" + + " \"description\":\"description\",\n" + + " \"details\":[]\n" + + " },\n" + + " \"get\":{\n" + + " \"found\":true,\n" + + " \"_source\":{\n" + + " \"field1\":\"value1\",\n" + + " \"field2\":\"value2\"\n" + + " },\n" + + " \"fields\":{\n" + + " \"field1\":[\n" + + " \"value1\"\n" + + " ]\n" + + " }\n" + + " }\n" + + "}").replaceAll("\\s+", ""); + assertThat(expectedResponse, equalTo(generatedResponse)); + } + + private static Explanation randomExplanation(Explanation... explanations) { + return Explanation.match(randomFloat(), randomAlphaOfLengthBetween(1, 10), + explanations.length > 0 ? explanations : new Explanation[0]); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java index 4f1fa4cf06116..4bd4406d81cca 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java @@ -19,50 +19,43 @@ package org.elasticsearch.action.search; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; +import java.util.function.Predicate; +import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; -public class MultiSearchResponseTests extends ESTestCase { +public class MultiSearchResponseTests extends AbstractXContentTestCase { - public void testFromXContent() throws IOException { - for (int runs = 0; runs < 20; runs++) { - MultiSearchResponse expected = createTestInstance(); - XContentType xContentType = randomFrom(XContentType.values()); - BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - MultiSearchResponse actual; - try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { - actual = MultiSearchResponse.fromXContext(parser); - assertThat(parser.nextToken(), nullValue()); - } - - assertThat(actual.getTook(), equalTo(expected.getTook())); - assertThat(actual.getResponses().length, equalTo(expected.getResponses().length)); - for (int i = 0; i < expected.getResponses().length; i++) { - MultiSearchResponse.Item expectedItem = expected.getResponses()[i]; - MultiSearchResponse.Item actualItem = actual.getResponses()[i]; - if (expectedItem.isFailure()) { - assertThat(actualItem.getResponse(), nullValue()); - assertThat(actualItem.getFailureMessage(), containsString(expectedItem.getFailureMessage())); - } else { - assertThat(actualItem.getResponse().toString(), equalTo(expectedItem.getResponse().toString())); - assertThat(actualItem.getFailure(), 
nullValue()); - } - } + @Override + protected MultiSearchResponse createTestInstance() { + int numItems = randomIntBetween(0, 128); + MultiSearchResponse.Item[] items = new MultiSearchResponse.Item[numItems]; + for (int i = 0; i < numItems; i++) { + // Creating a minimal response is OK, because SearchResponse itself + // is tested elsewhere. + long tookInMillis = randomNonNegativeLong(); + int totalShards = randomIntBetween(1, Integer.MAX_VALUE); + int successfulShards = randomIntBetween(0, totalShards); + int skippedShards = totalShards - successfulShards; + InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); + SearchResponse.Clusters clusters = new SearchResponse.Clusters(totalShards, successfulShards, skippedShards); + SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, totalShards, + successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters); + items[i] = new MultiSearchResponse.Item(searchResponse, null); } + return new MultiSearchResponse(items, randomNonNegativeLong()); } - private static MultiSearchResponse createTestInstance() { + private static MultiSearchResponse createTestInstanceWithFailures() { int numItems = randomIntBetween(0, 128); MultiSearchResponse.Item[] items = new MultiSearchResponse.Item[numItems]; for (int i = 0; i < numItems; i++) { @@ -85,4 +78,52 @@ private static MultiSearchResponse createTestInstance() { return new MultiSearchResponse(items, randomNonNegativeLong()); } + @Override + protected MultiSearchResponse doParseInstance(XContentParser parser) throws IOException { + return MultiSearchResponse.fromXContext(parser); + } + + @Override + protected void assertEqualInstances(MultiSearchResponse expected, MultiSearchResponse actual) { + assertThat(actual.getTook(), equalTo(expected.getTook())); + assertThat(actual.getResponses().length, equalTo(expected.getResponses().length)); + for (int i = 0; i < expected.getResponses().length; i++) { + MultiSearchResponse.Item expectedItem = expected.getResponses()[i]; + MultiSearchResponse.Item actualItem = actual.getResponses()[i]; + if (expectedItem.isFailure()) { + assertThat(actualItem.getResponse(), nullValue()); + assertThat(actualItem.getFailureMessage(), containsString(expectedItem.getFailureMessage())); + } else { + assertThat(actualItem.getResponse().toString(), equalTo(expectedItem.getResponse().toString())); + assertThat(actualItem.getFailure(), nullValue()); + } + } + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + protected Predicate<String> getRandomFieldsExcludeFilterWhenResultHasErrors() { + return field -> field.startsWith("responses"); + } + + /** + * Test parsing {@link MultiSearchResponse} with inner failures as they don't support asserting on xcontent equivalence, given that + * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier<MultiSearchResponse> instanceSupplier = MultiSearchResponseTests::createTestInstanceWithFailures; + //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, + //but that does not bother our assertions, as we only want to test that we don't break.
+ boolean supportsUnknownFields = true; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, Strings.EMPTY_ARRAY, + getRandomFieldsExcludeFilterWhenResultHasErrors(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, ToXContent.EMPTY_PARAMS); + } + } diff --git a/server/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java b/server/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java index 5247a224423ec..53efeb393e4b4 100644 --- a/server/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java +++ b/server/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java @@ -81,16 +81,12 @@ protected Settings prepareBackwardsDataDir(Path backwardsIndex) throws IOExcepti return builder.build(); } - public void testUpgradeStartClusterOn_0_20_6() throws Exception { - String indexName = "unsupported-0.20.6"; + public void testUpgradeStartClusterOn_2_4_5() throws Exception { + String indexName = "unsupported-2.4.5"; logger.info("Checking static index {}", indexName); Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip")); - try { - internalCluster().startNode(nodeSettings); - fail(); - } catch (Exception ex) { - assertThat(ex.getCause().getCause().getMessage(), containsString(" was created before v2.0.0.beta1 and wasn't upgraded")); - } + assertThat(expectThrows(Exception.class, () -> internalCluster().startNode(nodeSettings)) + .getCause().getCause().getMessage(), containsString("Format version is not supported")); } } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 0efeae29c3cce..170ea6cf9313d 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -863,7 +863,7 @@ public void testEnsureNoSelfReferences() throws IOException { /** * Test that the same map written multiple times do not trigger the self-reference check in - * {@link CollectionUtils#ensureNoSelfReferences(Object)} + * {@link CollectionUtils#ensureNoSelfReferences(Object, String)} (Object)} */ public void testRepeatedMapsAndNoSelfReferences() throws Exception { Map mapB = singletonMap("b", "B"); diff --git a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 6a8cf5bf6ab43..d236d01f049dd 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; @@ -92,7 +91,7 @@ public MetaData fromXContent(XContentParser parser) throws IOException { Files.copy(resource, dst); MetaData read = format.read(xContentRegistry(), dst); assertThat(read, 
notNullValue()); - assertThat(read.clusterUUID(), equalTo("3O1tDF1IRB6fSJ-GrTMUtg")); + assertThat(read.clusterUUID(), equalTo("y9XcwLJGTROoOEfixlRwfQ")); // indices are empty since they are serialized separately } @@ -237,7 +236,6 @@ public static void corruptFile(Path file, Logger logger) throws IOException { public void testLoadState() throws IOException { final Path[] dirs = new Path[randomIntBetween(1, 5)]; int numStates = randomIntBetween(1, 5); - int numLegacy = randomIntBetween(0, numStates); List meta = new ArrayList<>(); for (int i = 0; i < numStates; i++) { meta.add(randomMeta()); @@ -247,20 +245,7 @@ public void testLoadState() throws IOException { for (int i = 0; i < dirs.length; i++) { dirs[i] = createTempDir(); Files.createDirectories(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME)); - for (int j = 0; j < numLegacy; j++) { - if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) { - Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-"+j); - Files.createFile(file); // randomly create 0-byte files -- there is extra logic to skip them - } else { - try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(MetaDataStateFormat.FORMAT, - Files.newOutputStream(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j)))) { - xcontentBuilder.startObject(); - MetaData.Builder.toXContent(meta.get(j), xcontentBuilder, ToXContent.EMPTY_PARAMS); - xcontentBuilder.endObject(); - } - } - } - for (int j = numLegacy; j < numStates; j++) { + for (int j = 0; j < numStates; j++) { format.write(meta.get(j), dirs[i]); if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) { // corrupt a file that we do not necessarily need here.... Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j + ".st"); @@ -290,20 +275,18 @@ public void testLoadState() throws IOException { assertThat(loadedMetaData.indexGraveyard(), equalTo(latestMetaData.indexGraveyard())); // now corrupt all the latest ones and make sure we fail to load the state - if (numStates > numLegacy) { - for (int i = 0; i < dirs.length; i++) { - Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + (numStates-1) + ".st"); - if (corruptedFiles.contains(file)) { - continue; - } - MetaDataStateFormatTests.corruptFile(file, logger); - } - try { - format.loadLatestState(logger, xContentRegistry(), dirList.toArray(new Path[0])); - fail("latest version can not be read"); - } catch (ElasticsearchException ex) { - assertThat(ExceptionsHelper.unwrap(ex, CorruptStateException.class), notNullValue()); + for (int i = 0; i < dirs.length; i++) { + Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + (numStates-1) + ".st"); + if (corruptedFiles.contains(file)) { + continue; } + MetaDataStateFormatTests.corruptFile(file, logger); + } + try { + format.loadLatestState(logger, xContentRegistry(), dirList.toArray(new Path[0])); + fail("latest version can not be read"); + } catch (ElasticsearchException ex) { + assertThat(ExceptionsHelper.unwrap(ex, CorruptStateException.class), notNullValue()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/package-info.java b/server/src/test/java/org/elasticsearch/index/mapper/package-info.java index 4221a5d4a9668..9a06e56a820a5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/package-info.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/package-info.java @@ -19,9 +19,7 @@ /** * Mappings. 
Mappings define the way that documents should be translated to - * Lucene indices, for instance how the - * {@link org.elasticsearch.index.mapper.UidFieldMapper document identifier} - * should be indexed, whether a string field should be indexed as a + * Lucene indices, for instance whether a string field should be indexed as a * {@link org.elasticsearch.index.mapper.TextFieldMapper text} or * {@link org.elasticsearch.index.mapper.KeywordFieldMapper keyword} field, * etc. This parsing is done by the diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 26e6f4c076553..c40e3b73c6606 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -1345,8 +1345,6 @@ public void testExceptionOnNegativeInterval() { * https://github.com/elastic/elasticsearch/issues/31392 demonstrates an edge case where a date field mapping with * "format" = "epoch_millis" can lead for the date histogram aggregation to throw an error if a non-UTC time zone * with daylight savings time is used. This test was added to check this is working now - * @throws ExecutionException - * @throws InterruptedException */ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, ExecutionException { String index = "test31392"; diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 0369eda2a8899..5522550f76c7a 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CancellableThreads; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.core.internal.io.IOUtils; @@ -76,6 +77,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.iterableWithSize; @@ -555,6 +557,64 @@ public void testFetchShards() throws Exception { } } + public void testFetchShardsThreadContextHeader() throws Exception { + List knownNodes = new CopyOnWriteArrayList<>(); + try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); + MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) { + DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); + knownNodes.add(seedTransport.getLocalDiscoNode()); + knownNodes.add(discoverableTransport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { + service.start(); + service.acceptIncomingRequests(); + List nodes = Collections.singletonList(seedNode); + try (RemoteClusterConnection connection = new 
RemoteClusterConnection(Settings.EMPTY, "test-cluster", + nodes, service, Integer.MAX_VALUE, n -> true)) { + SearchRequest request = new SearchRequest("test-index"); + Thread[] threads = new Thread[10]; + for (int i = 0; i < threads.length; i++) { + final String threadId = Integer.toString(i); + threads[i] = new Thread(() -> { + ThreadContext threadContext = seedTransport.threadPool.getThreadContext(); + threadContext.putHeader("threadId", threadId); + AtomicReference reference = new AtomicReference<>(); + AtomicReference failReference = new AtomicReference<>(); + final ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index") + .indicesOptions(request.indicesOptions()).local(true).preference(request.preference()) + .routing(request.routing()); + CountDownLatch responseLatch = new CountDownLatch(1); + connection.fetchSearchShards(searchShardsRequest, + new LatchedActionListener<>(ActionListener.wrap( + resp -> { + reference.set(resp); + assertEquals(threadId, seedTransport.threadPool.getThreadContext().getHeader("threadId")); + }, + failReference::set), responseLatch)); + try { + responseLatch.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + assertNull(failReference.get()); + assertNotNull(reference.get()); + ClusterSearchShardsResponse clusterSearchShardsResponse = reference.get(); + assertEquals(knownNodes, Arrays.asList(clusterSearchShardsResponse.getNodes())); + }); + } + for (int i = 0; i < threads.length; i++) { + threads[i].start(); + } + + for (int i = 0; i < threads.length; i++) { + threads[i].join(); + } + assertTrue(connection.assertNoRunningConnections()); + } + } + } + } + public void testFetchShardsSkipUnavailable() throws Exception { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT)) { @@ -795,7 +855,13 @@ public void run() { throw new AssertionError(x); } }); - connection.updateSeedNodes(seedNodes, listener); + try { + connection.updateSeedNodes(seedNodes, listener); + } catch (Exception e) { + // it's ok if we're shutting down + assertThat(e.getMessage(), containsString("threadcontext is already closed")); + latch.countDown(); + } } latch.await(); } catch (Exception ex) { diff --git a/server/src/test/resources/indices/bwc/compressed-repo-1.7.4.zip b/server/src/test/resources/indices/bwc/compressed-repo-1.7.4.zip deleted file mode 100644 index 9edf7d57527e5..0000000000000 Binary files a/server/src/test/resources/indices/bwc/compressed-repo-1.7.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.20.6.zip b/server/src/test/resources/indices/bwc/unsupported-0.20.6.zip deleted file mode 100644 index 753b577d3ad01..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.20.6.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.0.Beta1.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.0.Beta1.zip deleted file mode 100644 index 5bbdea4a96322..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.0.Beta1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC1.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC1.zip deleted file mode 100644 index d9072ce465c87..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC1.zip and /dev/null differ diff --git 
a/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC2.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC2.zip deleted file mode 100644 index dce299b7d6082..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.0.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.0.zip deleted file mode 100644 index 3ec908ddc27e9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.1.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.1.zip deleted file mode 100644 index 67db98fc3ce90..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.10.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.10.zip deleted file mode 100644 index 6bdb9f27c8d28..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.10.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.11.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.11.zip deleted file mode 100644 index b5253f9a673b9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.11.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.12.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.12.zip deleted file mode 100644 index 0392049bb9dd5..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.12.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.13.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.13.zip deleted file mode 100644 index 025b4c38b13fa..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.13.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.2.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.2.zip deleted file mode 100644 index 413e08e65816f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.3.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.3.zip deleted file mode 100644 index c31d4de7c5bbc..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.4.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.4.zip deleted file mode 100644 index 8b07a92493532..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.5.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.5.zip deleted file mode 100644 index dfd0fd09e7bca..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.5.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.6.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.6.zip deleted file mode 100644 index 1f3cff28e7a2f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.6.zip and /dev/null differ diff --git 
a/server/src/test/resources/indices/bwc/unsupported-0.90.7.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.7.zip deleted file mode 100644 index 6d0e65c28c42e..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.7.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.8.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.8.zip deleted file mode 100644 index 8ff8ac3ddf2ff..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.8.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.9.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.9.zip deleted file mode 100644 index 4445b3905accd..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.9.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta1.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta1.zip deleted file mode 100644 index 167dde888dfe6..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta2.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta2.zip deleted file mode 100644 index 95fbfefb8702f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC1.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC1.zip deleted file mode 100644 index 3ced97aa2a995..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC2.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC2.zip deleted file mode 100644 index 1298cfbfd1df4..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.0.zip deleted file mode 100644 index 2cb9abc43dcc7..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.1.zip deleted file mode 100644 index 844271b58c459..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.2.zip deleted file mode 100644 index dd8e39388dff4..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.3.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.3.zip deleted file mode 100644 index e4437effa243b..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.1.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.1.0.zip deleted file mode 100644 index 4f05370b1298a..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.1.0.zip and /dev/null differ diff 
--git a/server/src/test/resources/indices/bwc/unsupported-1.1.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.1.1.zip deleted file mode 100644 index effeb94287b2f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.1.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.1.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.1.2.zip deleted file mode 100644 index bedffa44f195d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.1.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.2.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.2.0.zip deleted file mode 100644 index 4644a38933378..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.2.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.2.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.2.1.zip deleted file mode 100644 index 553b46b06ad36..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.2.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.2.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.2.2.zip deleted file mode 100644 index 3f51a47534e0d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.2.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.2.3.zip b/server/src/test/resources/indices/bwc/unsupported-1.2.3.zip deleted file mode 100644 index 8c8bfbd908b2d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.2.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.2.4.zip b/server/src/test/resources/indices/bwc/unsupported-1.2.4.zip deleted file mode 100644 index e3a151930e7b0..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.2.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.0.zip deleted file mode 100644 index d98958dea334c..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.1.zip deleted file mode 100644 index 167d0f4c94639..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.2.zip deleted file mode 100644 index 756eaf68ac2cd..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.3.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.3.zip deleted file mode 100644 index 8470deefd8f38..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.4.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.4.zip deleted file mode 100644 index 2175012fc6dcc..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.4.zip and /dev/null differ diff --git 
a/server/src/test/resources/indices/bwc/unsupported-1.3.5.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.5.zip deleted file mode 100644 index 19d1e568d849f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.5.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.6.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.6.zip deleted file mode 100644 index ad8e8bd003c62..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.6.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.7.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.7.zip deleted file mode 100644 index 3a645a9dd55c5..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.7.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.8.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.8.zip deleted file mode 100644 index f8ab0a26dc62d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.8.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.9.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.9.zip deleted file mode 100644 index 5ef35b21ced7c..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.9.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.0.Beta1.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.0.Beta1.zip deleted file mode 100644 index 4546f5d4aa320..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.0.Beta1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.0.zip deleted file mode 100644 index 467d19aa5edcb..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.1.zip deleted file mode 100644 index 2adbb28caedb4..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.2.zip deleted file mode 100644 index 4fac2086e5d5e..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.3.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.3.zip deleted file mode 100644 index 1a0d66745a994..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.4.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.4.zip deleted file mode 100644 index 0328a9e2d570b..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.5.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.5.zip deleted file mode 100644 index eeb25ab6ed95f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.5.zip and /dev/null differ diff --git 
a/server/src/test/resources/indices/bwc/unsupported-1.5.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.5.0.zip deleted file mode 100644 index f1dab0883149d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.5.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.5.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.5.1.zip deleted file mode 100644 index 342e311e56db9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.5.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.5.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.5.2.zip deleted file mode 100644 index fb36b197c4a8e..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.5.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.6.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.6.0.zip deleted file mode 100644 index 02a5806638b61..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.6.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.6.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.6.1.zip deleted file mode 100644 index 04820f92b0701..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.6.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.6.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.6.2.zip deleted file mode 100644 index af6ce561fa6a2..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.6.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.0.zip deleted file mode 100644 index 941be645c65b9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.1.zip deleted file mode 100644 index debd797162b70..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.2.zip deleted file mode 100644 index 18bb6c7dfc9b6..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.3.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.3.zip deleted file mode 100644 index 9fcc1788ea953..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.4.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.4.zip deleted file mode 100644 index a47ff4faffc93..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.5.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.5.zip deleted file mode 100644 index 22625293a1c37..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.5.zip and /dev/null differ diff --git 
a/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta1.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta1.zip deleted file mode 100644 index 6d609479552f5..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta2.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta2.zip deleted file mode 100644 index 6732f715cfe3b..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.0-rc1.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.0-rc1.zip deleted file mode 100644 index 8c440725e9cd9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.0-rc1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.0.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.0.zip deleted file mode 100644 index cc0a0ae532025..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.1.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.1.zip deleted file mode 100644 index 81a31d18f816c..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.2.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.2.zip deleted file mode 100644 index 63be140108cd2..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.1.0.zip b/server/src/test/resources/indices/bwc/unsupported-2.1.0.zip deleted file mode 100644 index dff157c2ab23b..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.1.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.1.1.zip b/server/src/test/resources/indices/bwc/unsupported-2.1.1.zip deleted file mode 100644 index b7c408e559758..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.1.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.1.2.zip b/server/src/test/resources/indices/bwc/unsupported-2.1.2.zip deleted file mode 100644 index d6a4f9fddfa02..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.1.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.2.0.zip b/server/src/test/resources/indices/bwc/unsupported-2.2.0.zip deleted file mode 100644 index 5c9eba5c61653..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.2.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.2.1.zip b/server/src/test/resources/indices/bwc/unsupported-2.2.1.zip deleted file mode 100644 index 3596820a44a7d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.2.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.2.2.zip b/server/src/test/resources/indices/bwc/unsupported-2.2.2.zip deleted file mode 100644 index 788ba0712b5c9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.2.2.zip and /dev/null differ diff --git 
a/server/src/test/resources/indices/bwc/unsupported-2.3.0.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.0.zip deleted file mode 100644 index 212d3f8c7cf71..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.1.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.1.zip deleted file mode 100644 index b825872bb55a8..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.2.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.2.zip deleted file mode 100644 index f6b8ec502d9d3..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.3.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.3.zip deleted file mode 100644 index e349aac537670..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.4.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.4.zip deleted file mode 100644 index bc75ad093cfb6..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.5.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.5.zip deleted file mode 100644 index c01af7a206219..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.5.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.4.0.zip b/server/src/test/resources/indices/bwc/unsupported-2.4.0.zip deleted file mode 100644 index 5055ded5f87ac..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.4.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.4.1.zip b/server/src/test/resources/indices/bwc/unsupported-2.4.1.zip deleted file mode 100644 index 6dc29439a0f9e..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.4.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.4.2.zip b/server/src/test/resources/indices/bwc/unsupported-2.4.2.zip deleted file mode 100644 index f8f31d00dcc0f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.4.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.4.3.zip b/server/src/test/resources/indices/bwc/unsupported-2.4.3.zip deleted file mode 100644 index 9b4ad26db1e5e..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.4.3.zip and /dev/null differ diff --git a/server/src/test/resources/org/elasticsearch/gateway/global-3.st b/server/src/test/resources/org/elasticsearch/gateway/global-3.st index b6b4e8d762eff..ea8cb5bf257a8 100644 Binary files a/server/src/test/resources/org/elasticsearch/gateway/global-3.st and b/server/src/test/resources/org/elasticsearch/gateway/global-3.st differ diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 91a1b4a3a4ee5..61538c44aaca5 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java 
+++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java
@@ -292,7 +292,7 @@ protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData,
      * @param indexMetaData indexMetaData for the shard, including any mapping
      * @param indexSearcherWrapper an optional wrapper to be used during searchers
      * @param globalCheckpointSyncer callback for syncing global checkpoints
-     * @param indexEventListener
+     * @param indexEventListener index event listener
      * @param listeners an optional set of listeners to add to the shard
      */
     protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMetaData indexMetaData,
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
index 48301fa5746e2..a2acc5371a19e 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
@@ -132,7 +132,6 @@ protected String[] shuffleProtectedFields() {
      * To find the right position in the root query, we add a marker as `queryName` which
      * all query builders support. The added bogus field after that should trigger the exception.
      * Queries that allow arbitrary field names at this level need to override this test.
-     * @throws IOException
      */
     public void testUnknownField() throws IOException {
         String marker = "#marker#";
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
index 6afc123520bb0..69f4e0666eaa3 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
@@ -31,6 +31,7 @@
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi;
 import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec;
@@ -105,6 +106,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
     private static List blacklistPathMatchers;
     private static ClientYamlTestExecutionContext restTestExecutionContext;
     private static ClientYamlTestExecutionContext adminExecutionContext;
+    private static ClientYamlTestClient clientYamlTestClient;
 
     private final ClientYamlTestCandidate testCandidate;
 
@@ -136,7 +138,7 @@ public void initAndResetContext() throws Exception {
             final Version esVersion = versionVersionTuple.v1();
             final Version masterVersion = versionVersionTuple.v2();
             logger.info("initializing client, minimum es version [{}], master version, [{}], hosts {}", esVersion, masterVersion, hosts);
-            final ClientYamlTestClient clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, esVersion, masterVersion);
+            clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, esVersion, masterVersion);
             restTestExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, randomizeContentType());
             adminExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, false);
             final String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null);
@@ -169,6 +171,18 @@ protected ClientYamlTestClient initClientYamlTestClient(
                 restClientBuilder -> configureClient(restClientBuilder, restClientSettings()));
     }
 
+    @AfterClass
+    public static void closeClient() throws IOException {
+        try {
+            IOUtils.close(clientYamlTestClient);
+        } finally {
+            blacklistPathMatchers = null;
+            restTestExecutionContext = null;
+            adminExecutionContext = null;
+            clientYamlTestClient = null;
+        }
+    }
+
     /**
      * Create parameters for this parameterized test. Uses the
      * {@link ExecutableSection#XCONTENT_REGISTRY list} of executable sections
@@ -266,13 +280,6 @@ private static void validateSpec(ClientYamlSuiteRestSpec restSpec) {
         }
     }
 
-    @AfterClass
-    public static void clearStatic() {
-        blacklistPathMatchers = null;
-        restTestExecutionContext = null;
-        adminExecutionContext = null;
-    }
-
     private static Tuple<Version, Version> readVersionsFromCatNodes(RestClient restClient) throws IOException {
         // we simply go to the _cat/nodes API and parse all versions in the cluster
         Response response = restClient.performRequest("GET", "/_cat/nodes", Collections.singletonMap("h", "version,master"));
diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle
index 3822ef1d4d584..3de63d76204bd 100644
--- a/x-pack/plugin/build.gradle
+++ b/x-pack/plugin/build.gradle
@@ -192,7 +192,7 @@ integTestCluster {
     return tmpFile.exists()
   }
 }
-if (integTestCluster.distribution.startsWith("oss-") == false) {
+if (integTestCluster.distribution.startsWith("oss-")) {
   integTest.enabled = false
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ExpirationCallback.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ExpirationCallback.java
index 26bec9e62948f..d3916f9dd3ab1 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ExpirationCallback.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ExpirationCallback.java
@@ -134,8 +134,8 @@ final TimeValue delay(long expirationDate, long now) {
     }
 
     /**
-     * {@link SchedulerEngine.Schedule#nextScheduledTimeAfter(long, long)} with respect to
-     * license expiry date
+     * {@link org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Schedule#nextScheduledTimeAfter(long, long)}
+     * with respect to license expiry date
      */
     public final long nextScheduledTimeForExpiry(long expiryDate, long startTime, long time) {
         TimeValue delay = delay(expiryDate, time);
@@ -169,4 +169,4 @@ public final String toString() {
                 orientation.name(), TimeValue.timeValueMillis(min), TimeValue.timeValueMillis(max), TimeValue.timeValueMillis(frequency));
     }
-}
\ No newline at end of file
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DerParser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DerParser.java
index fedbbb3194724..ae15c70e97b9b 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DerParser.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DerParser.java
@@ -87,7 +87,8 @@ Asn1Object readAsn1Object() throws IOException {
      * Decode the length of the field. Can only support length
      * encoding up to 4 octets.
      * <p/>
-     * <p/>
-     * In BER/DER encoding, length can be encoded in 2 forms,
+     * In BER/DER encoding, length can be encoded in 2 forms:
+     * <p/>
      * <ul>
      * <li>Short form. One octet. Bit 8 has value "0" and bits 7-1
      * give the length.
@@ -100,7 +101,6 @@ Asn1Object readAsn1Object() throws IOException {
      * </ul>
      *
      * @return The length as integer
-     * @throws IOException
      */
     private int getLength() throws IOException {
@@ -145,7 +145,8 @@ static class Asn1Object {
      * Construct a ASN.1 TLV. The TLV could be either a
      * constructed or primitive entity.
      * <p/>
-     * <p/>
-     * The first byte in DER encoding is made of following fields,
+     * The first byte in DER encoding is made of following fields:
+     * <p/>
      * <pre>
      * -------------------------------------------------
      * |Bit 8|Bit 7|Bit 6|Bit 5|Bit 4|Bit 3|Bit 2|Bit 1|
    @@ -192,7 +193,6 @@ public boolean isConstructed() {
              * For constructed field, return a parser for its content.
              *
              * @return A parser for the construct.
    -         * @throws IOException
              */
             public DerParser getParser() throws IOException {
                 if (!isConstructed())
    @@ -205,7 +205,6 @@ public DerParser getParser() throws IOException {
              * Get the value as integer
              *
              * @return BigInteger
    -         * @throws IOException
              */
             public BigInteger getInteger() throws IOException {
                 if (type != DerParser.INTEGER)
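
The Javadoc cleaned up above describes the two BER/DER length encodings that DerParser handles: short form and long form, with at most 4 length octets. As a reference, a minimal, self-contained sketch of that decoding rule; the class and method names are invented for illustration and this is not the actual DerParser code:

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class DerLengthSketch {
        static int readLength(InputStream in) throws IOException {
            int first = in.read();
            if (first == -1) {
                throw new IOException("unexpected end of input");
            }
            if ((first & 0x80) == 0) {
                return first & 0x7F;             // short form: bits 7-1 hold the length
            }
            int numOctets = first & 0x7F;        // long form: bits 7-1 hold the count of length octets
            if (numOctets > 4) {
                throw new IOException("only length encodings up to 4 octets are supported");
            }
            int length = 0;
            for (int i = 0; i < numOctets; i++) {
                int octet = in.read();
                if (octet == -1) {
                    throw new IOException("unexpected end of input");
                }
                length = (length << 8) | octet;  // base 256, most significant octet first
            }
            return length;
        }

        public static void main(String[] args) throws IOException {
            // 0x82 0x01 0x23 -> long form with two length octets, i.e. 0x0123 = 291
            System.out.println(readLength(new ByteArrayInputStream(new byte[] {(byte) 0x82, 0x01, 0x23})));
        }
    }
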
    diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java
    index 9ff44d0135ffa..d959c017e0a35 100644
    --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java
    +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/PemUtils.java
     @@ -108,7 +108,6 @@ public static PrivateKey readPrivateKey(Path keyPath, Supplier<char[]> passwordS
          * Removes the EC Headers that OpenSSL adds to EC private keys as the information in them
          * is redundant
          *
    -     * @param bReader
          * @throws IOException if the EC Parameter footer is missing
          */
         private static BufferedReader removeECHeaders(BufferedReader bReader) throws IOException {
    @@ -133,7 +132,6 @@ private static BufferedReader removeECHeaders(BufferedReader bReader) throws IOE
          * Removes the DSA Params Headers that OpenSSL adds to DSA private keys as the information in them
          * is redundant
          *
    -     * @param bReader
          * @throws IOException if the EC Parameter footer is missing
          */
         private static BufferedReader removeDsaHeaders(BufferedReader bReader) throws IOException {
    diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManager.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManager.java
    index c49692dda98c1..8a82694785a28 100644
    --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManager.java
    +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManager.java
     @@ -132,7 +132,7 @@ private Set<String> readCommonNames(X509Certificate certificate) throws Certific
          * Decodes the otherName CN from the certificate
          *
          * @param value       The DER Encoded Subject Alternative Name
    -     * @param certificate
    +     * @param certificate The certificate
          * @return the CN or null if it could not be parsed
          */
         private String decodeDerValue(byte[] value, X509Certificate certificate) {
    diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle
    index 8b991555c0670..4c3cc9eef9313 100644
    --- a/x-pack/plugin/ml/build.gradle
    +++ b/x-pack/plugin/ml/build.gradle
    @@ -99,7 +99,7 @@ task internalClusterTest(type: RandomizedTestingTask,
                              dependsOn: test.dependsOn) {
       configure(BuildPlugin.commonTestConfig(project))
       classpath = project.test.classpath
    -  testClassesDir = project.test.testClassesDir
    +  testClassesDirs = project.test.testClassesDirs
       include '**/*IT.class'
       systemProperty 'es.set.netty.runtime.available.processors', 'false'
     }
    diff --git a/x-pack/plugin/monitoring/build.gradle b/x-pack/plugin/monitoring/build.gradle
    index fbdb388e78e19..3fde6cd8c3775 100644
    --- a/x-pack/plugin/monitoring/build.gradle
    +++ b/x-pack/plugin/monitoring/build.gradle
    @@ -62,7 +62,7 @@ task internalClusterTest(type: RandomizedTestingTask,
                              dependsOn: test.dependsOn) {
       configure(BuildPlugin.commonTestConfig(project))
       classpath = project.test.classpath
    -  testClassesDir = project.test.testClassesDir
    +  testClassesDirs = project.test.testClassesDirs
       include '**/*IT.class'
       systemProperty 'es.set.netty.runtime.available.processors', 'false'
     }
    diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle
    index d8ce1ca304763..18ef7abee5c64 100644
    --- a/x-pack/plugin/rollup/build.gradle
    +++ b/x-pack/plugin/rollup/build.gradle
    @@ -42,7 +42,7 @@ task internalClusterTest(type: RandomizedTestingTask,
             dependsOn: test.dependsOn) {
         configure(BuildPlugin.commonTestConfig(project))
         classpath = project.test.classpath
    -    testClassesDir = project.test.testClassesDir
    +    testClassesDirs = project.test.testClassesDirs
         include '**/*IT.class'
         systemProperty 'es.set.netty.runtime.available.processors', 'false'
     }
    diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java
    index 5abd701ce4b2e..a07f1e7d32e7c 100644
    --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java
    +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java
    @@ -405,7 +405,8 @@ private CompositeAggregationBuilder createCompositeBuilder(RollupJobConfig confi
         }
     
         /**
    -     * Creates the range query that limits the search to documents that appear before the maximum allowed time (see {@link this#maxBoundary}
    +     * Creates the range query that limits the search to documents that appear before the maximum allowed time
     +     * (see {@link #maxBoundary})
          * and on or after the last processed time.
          * @param position The current position of the pagination
          * @return The range query to execute
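
The fixed Javadoc above describes a query bounded on both sides: on or after the last processed time and strictly before maxBoundary. A rough sketch of such a bounded range query with the standard QueryBuilders API; the field and variable names are placeholders, not the actual RollupIndexer code:

    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.index.query.RangeQueryBuilder;

    public class BoundedRangeQuerySketch {
        // lastProcessed and maxBoundary are illustrative epoch-millis values
        static RangeQueryBuilder boundedRange(String timestampField, long lastProcessed, long maxBoundary) {
            return QueryBuilders.rangeQuery(timestampField)
                    .gte(lastProcessed)  // on or after the last processed time
                    .lt(maxBoundary);    // strictly before the maximum allowed time
        }
    }
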
    diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle
    index 12533a389b5f1..4a9df6c9ccc5f 100644
    --- a/x-pack/plugin/security/build.gradle
    +++ b/x-pack/plugin/security/build.gradle
    @@ -17,7 +17,7 @@ dependencies {
         compileOnly project(path: ':plugins:transport-nio', configuration: 'runtime')
     
         testCompile project(path: xpackModule('monitoring'))
    -    testCompile project(path: xpackModule('sql:sql-proto'))
    +    testCompile project(path: xpackModule('sql:sql-action'))
     
         testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
     
    diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/RestorableContextClassLoader.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/RestorableContextClassLoader.java
    index f2e36ebf98273..95c68eab9c28a 100644
    --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/RestorableContextClassLoader.java
    +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/RestorableContextClassLoader.java
    @@ -12,7 +12,7 @@
     import org.elasticsearch.SpecialPermission;
     
     /**
    - * A try-with-resource compatible object for configuring a thread {@link Thread#contextClassLoader}.
     + * A try-with-resources compatible object for configuring a thread {@link Thread#getContextClassLoader()}.
      * On construction this class will set the current (or provided) thread's context class loader.
      * On {@link #close()}, it restores the previous value of the class loader.
      */
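
The corrected link above points at the thread context class loader that this helper swaps and restores. The general try-with-resources pattern it enables looks roughly like the following; ContextClassLoaderSwap is a hypothetical stand-in, not the actual RestorableContextClassLoader API (which also handles SpecialPermission checks):

    public class ContextClassLoaderSwapSketch {
        static final class ContextClassLoaderSwap implements AutoCloseable {
            private final Thread thread;
            private final ClassLoader previous;

            ContextClassLoaderSwap(Thread thread, ClassLoader loader) {
                this.thread = thread;
                this.previous = thread.getContextClassLoader();
                thread.setContextClassLoader(loader);    // set on construction
            }

            @Override
            public void close() {
                thread.setContextClassLoader(previous);  // restore the previous loader
            }
        }

        public static void main(String[] args) {
            ClassLoader pluginLoader = ContextClassLoaderSwapSketch.class.getClassLoader(); // placeholder loader
            try (ContextClassLoaderSwap ignored = new ContextClassLoaderSwap(Thread.currentThread(), pluginLoader)) {
                // code that resolves classes or resources through pluginLoader runs here
            }
        }
    }
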
    diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlTestCase.java
    index 51a6d8732a5b3..adaba34a73aa3 100644
    --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlTestCase.java
    +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlTestCase.java
    @@ -64,7 +64,6 @@ public static void restoreLocale() throws Exception {
          * Generates signed certificate and associates with generated key pair.
          * @see #readRandomKeyPair(String)
          * @return X509Certificate a signed certificate, it's PrivateKey {@link Tuple}
    -     * @throws Exception
          */
          protected static Tuple<X509Certificate, PrivateKey> readRandomKeyPair() throws Exception {
             return readRandomKeyPair("RSA");
     @@ -73,9 +72,7 @@ protected static Tuple<X509Certificate, PrivateKey> readRandomKeyPair() throws E
         /**
          * Reads a key pair and associated certificate for given algorithm and key length
          * For testing, for "EC" algorithm 256 key size is used, others use 2048 as default.
    -     * @param algorithm
    -     * @return X509Certificate a signed certificate, it's PrivateKey {@link Tuple}
    -     * @throws Exception
     +     * @return X509Certificate a signed certificate, its PrivateKey
          */
          protected static Tuple<X509Certificate, PrivateKey> readRandomKeyPair(String algorithm) throws Exception {
             int keySize;
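
The Javadoc trimmed above states the key-size rule used by these test fixtures: 256-bit keys for "EC", 2048-bit for everything else. A small sketch of that rule with the standard KeyPairGenerator API (illustrative only; the test itself reads pre-generated keys from resources rather than generating them):

    import java.security.KeyPair;
    import java.security.KeyPairGenerator;

    public class KeyPairSketch {
        static KeyPair generate(String algorithm) throws Exception {
            int keySize = "EC".equals(algorithm) ? 256 : 2048;
            KeyPairGenerator generator = KeyPairGenerator.getInstance(algorithm);
            generator.initialize(keySize);
            return generator.generateKeyPair();
        }

        public static void main(String[] args) throws Exception {
            System.out.println(generate("RSA").getPublic().getAlgorithm()); // prints RSA
        }
    }
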
    diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
    index 11ee0a6a0012e..9e33e145e24fe 100644
    --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
    +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
    @@ -122,8 +122,8 @@
     import org.elasticsearch.xpack.security.audit.AuditTrailService;
     import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
     import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryAction;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryRequest;
    +import org.elasticsearch.xpack.sql.action.SqlQueryAction;
    +import org.elasticsearch.xpack.sql.action.SqlQueryRequest;
     import org.junit.Before;
     import org.mockito.Mockito;
     
    diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle
    index 19dd1a08ec6f6..436837e85b4f0 100644
    --- a/x-pack/plugin/sql/build.gradle
    +++ b/x-pack/plugin/sql/build.gradle
    @@ -24,7 +24,7 @@ dependencies {
             // exclude ASM to not affect featureAware task on Java 10+
             exclude group: "org.ow2.asm"
         }
    -    compile project('sql-proto')
    +    compile project('sql-action')
         compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}"
         compile "org.antlr:antlr4-runtime:4.5.3"
         testCompile "org.elasticsearch.test:framework:${version}"
    diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle
    index e383e71cd4c76..ca8d966a031cf 100644
    --- a/x-pack/plugin/sql/jdbc/build.gradle
    +++ b/x-pack/plugin/sql/jdbc/build.gradle
    @@ -24,10 +24,10 @@ forbiddenApisMain {
     }
     
     dependencies {
    -    compile (xpackProject('plugin:sql:sql-shared-client')) {
    +    compile (xpackProject('plugin:sql:sql-client')) {
             transitive = false
         }
    -    compile (xpackProject('plugin:sql:sql-shared-proto')) {
    +    compile (xpackProject('plugin:sql:sql-proto')) {
             transitive = false
         }
         compile (project(':libs:x-content')) {
    @@ -39,12 +39,12 @@ dependencies {
     }
     
     dependencyLicenses {
    -    mapping from: /sql-shared-proto.*/, to: 'elasticsearch'
    -    mapping from: /sql-shared-client.*/, to: 'elasticsearch'
    +    mapping from: /sql-proto.*/, to: 'elasticsearch'
    +    mapping from: /sql-client.*/, to: 'elasticsearch'
         mapping from: /jackson-.*/, to: 'jackson'
         mapping from: /elasticsearch-core.*/, to: 'elasticsearch'
    -    ignoreSha 'sql-shared-proto'
    -    ignoreSha 'sql-shared-client'
    +    ignoreSha 'sql-proto'
    +    ignoreSha 'sql-client'
         ignoreSha 'elasticsearch'
     }
     
    diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java
    index 41bc80c104052..ccba7429a4bb2 100644
    --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java
    +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java
    @@ -5,7 +5,7 @@
      */
     package org.elasticsearch.xpack.sql.jdbc.debug;
     
    -import org.elasticsearch.xpack.sql.client.shared.SuppressForbidden;
    +import org.elasticsearch.xpack.sql.client.SuppressForbidden;
     import org.elasticsearch.xpack.sql.jdbc.JdbcException;
     import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
     
    @@ -39,10 +39,10 @@
      * being global and not working well with encoding (hence why {@link DriverManager#getLogWriter()} was introduced)
      * and was changed again through {@link DataSource#getLogWriter()}.
      * However by then the damage was done and most drivers don't use either and have their own logging implementation.
    - * 
    + *
      * This class tries to cater to both audiences - use the legacy, Writer way if needed though strive to use the
      * proper typical approach, that of specifying intention and output (file) in the URL.
    - * 
    + *
      * For this reason the {@link System#out} and {@link System#err} are being refered in this class though are used only
      * when needed.
      */
    @@ -65,7 +65,7 @@ public final class Debug {
          * {@link DriverManager#setLogWriter(PrintWriter)} and {@link DataSource#setLogWriter(PrintWriter)}.
          * The former is the 'legacy' way, having a global impact on all drivers while the latter allows per
          * instance configuration.
    -     * 
    +     *
          * As both approaches are not widely used, Debug will take the principle of least surprise and pick its
          * own configuration first; if that does not exist it will fallback to the managed approaches (assuming they
          * are specified, otherwise logging is simply disabled).
    @@ -235,4 +235,4 @@ private static PrintStream stdout() {
         private static PrintStream stderr() {
             return System.err;
         }
    -}
    \ No newline at end of file
    +}
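
The whitespace-only changes above touch the Debug Javadoc, which explains the two managed ways a JDBC log writer can be installed. For context, both hooks are plain JDK API and look like this; the PrintWriter target is an arbitrary example:

    import java.io.PrintWriter;
    import java.sql.DriverManager;

    public class JdbcLogWriterSketch {
        public static void main(String[] args) {
            // Legacy, global hook: affects every JDBC driver loaded in the JVM.
            DriverManager.setLogWriter(new PrintWriter(System.err, true));

            // The per-instance alternative is javax.sql.DataSource#setLogWriter(PrintWriter);
            // as the Javadoc above notes, many drivers ignore both and keep their own logging.
        }
    }
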
    diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/DebugLog.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/DebugLog.java
    index 28444c1f8ee0c..e0962c5dcc1a6 100644
    --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/DebugLog.java
    +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/DebugLog.java
    @@ -5,7 +5,7 @@
      */
     package org.elasticsearch.xpack.sql.jdbc.debug;
     
    -import org.elasticsearch.xpack.sql.client.shared.StringUtils;
    +import org.elasticsearch.xpack.sql.client.StringUtils;
     
     import java.io.PrintWriter;
     import java.lang.reflect.Array;
    @@ -56,16 +56,16 @@ void logException(Method m, Object[] args, Throwable t) {
             t.printStackTrace(print);
             print.flush();
         }
    -    
    +
     
         private static String array(Object[] a) {
             if (a == null || a.length == 0) {
    -            return StringUtils.EMPTY; 
    +            return StringUtils.EMPTY;
             }
             if (a.length == 1) {
                 return handleArray(a[0]);
             }
    -        
    +
             StringBuilder b = new StringBuilder();
             int iMax = a.length - 1;
             for (int i = 0; ; i++) {
    @@ -97,4 +97,4 @@ private static String handleArray(Object o) {
             }
             return String.valueOf(o);
         }
    -}
    \ No newline at end of file
    +}
    diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java
    index 1d90c8c08ca89..a2ab9060b5aa3 100644
    --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java
    +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java
    @@ -5,9 +5,9 @@
      */
     package org.elasticsearch.xpack.sql.jdbc.jdbc;
     
    -import org.elasticsearch.xpack.sql.client.shared.ConnectionConfiguration;
    -import org.elasticsearch.xpack.sql.client.shared.StringUtils;
    -import org.elasticsearch.xpack.sql.client.shared.Version;
    +import org.elasticsearch.xpack.sql.client.ConnectionConfiguration;
    +import org.elasticsearch.xpack.sql.client.StringUtils;
    +import org.elasticsearch.xpack.sql.client.Version;
     import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
     
     import java.net.URI;
    @@ -22,8 +22,8 @@
     import java.util.TimeZone;
     import java.util.concurrent.TimeUnit;
     
    -import static org.elasticsearch.xpack.sql.client.shared.UriUtils.parseURI;
    -import static org.elasticsearch.xpack.sql.client.shared.UriUtils.removeQuery;
    +import static org.elasticsearch.xpack.sql.client.UriUtils.parseURI;
    +import static org.elasticsearch.xpack.sql.client.UriUtils.removeQuery;
     
     /**
      / Supports the following syntax
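
The imports above now come from the flattened org.elasticsearch.xpack.sql.client package, and this class parses the driver URL plus its connection properties. A hedged usage sketch, assuming the usual jdbc:es:// URL shape and the connect.timeout / page.timeout property keys suggested by the ConnectionConfiguration constants used in JdbcConfigurationTests below (check that class for the exact keys):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.util.Properties;

    public class JdbcUrlSketch {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            props.setProperty("connect.timeout", "5000");   // assumed key for CONNECT_TIMEOUT
            props.setProperty("page.timeout", "10000");     // assumed key for PAGE_TIMEOUT

            try (Connection con = DriverManager.getConnection("jdbc:es://localhost:9200", props)) {
                System.out.println(con.getMetaData().getDriverName());
            }
        }
    }
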
    diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java
    index fbbb030bc4973..5cb63a3376348 100644
    --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java
    +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDatabaseMetaData.java
    @@ -5,8 +5,8 @@
      */
     package org.elasticsearch.xpack.sql.jdbc.jdbc;
     
    -import org.elasticsearch.xpack.sql.client.shared.ObjectUtils;
    -import org.elasticsearch.xpack.sql.client.shared.Version;
    +import org.elasticsearch.xpack.sql.client.ObjectUtils;
    +import org.elasticsearch.xpack.sql.client.Version;
     import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
     import org.elasticsearch.xpack.sql.jdbc.net.client.Cursor;
     import org.elasticsearch.xpack.sql.jdbc.net.protocol.ColumnInfo;
    diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDriver.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDriver.java
    index 3fdb002a0aa94..2b68026b67816 100644
    --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDriver.java
    +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcDriver.java
    @@ -5,7 +5,7 @@
      */
     package org.elasticsearch.xpack.sql.jdbc.jdbc;
     
    -import org.elasticsearch.xpack.sql.client.shared.Version;
    +import org.elasticsearch.xpack.sql.client.Version;
     import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
     import org.elasticsearch.xpack.sql.jdbc.debug.Debug;
     
    @@ -127,4 +127,4 @@ public Logger getParentLogger() throws SQLFeatureNotSupportedException {
         private void close() {
             Debug.close();
         }
    -}
    \ No newline at end of file
    +}
    diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java
    index 5ace03ff8a33e..bae4260ac2b69 100644
    --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java
    +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java
    @@ -5,6 +5,8 @@
      */
     package org.elasticsearch.xpack.sql.jdbc.jdbc;
     
    +import org.elasticsearch.xpack.sql.type.DataType;
    +
     import java.io.InputStream;
     import java.io.Reader;
     import java.math.BigDecimal;
    @@ -21,13 +23,24 @@
     import java.sql.ResultSet;
     import java.sql.ResultSetMetaData;
     import java.sql.RowId;
    +import java.sql.SQLDataException;
     import java.sql.SQLException;
     import java.sql.SQLFeatureNotSupportedException;
     import java.sql.SQLXML;
    +import java.sql.Struct;
     import java.sql.Time;
     import java.sql.Timestamp;
     import java.sql.Types;
    +import java.time.LocalDate;
    +import java.time.LocalDateTime;
    +import java.time.LocalTime;
    +import java.time.OffsetDateTime;
    +import java.time.OffsetTime;
    +import java.util.ArrayList;
    +import java.util.Arrays;
     import java.util.Calendar;
    +import java.util.List;
    +import java.util.Locale;
     
     class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement {
         final PreparedQuery query;
    @@ -74,67 +87,67 @@ public void setNull(int parameterIndex, int sqlType) throws SQLException {
     
         @Override
         public void setBoolean(int parameterIndex, boolean x) throws SQLException {
    -        setParam(parameterIndex, x, Types.BOOLEAN);
    +        setObject(parameterIndex, x, Types.BOOLEAN);
         }
     
         @Override
         public void setByte(int parameterIndex, byte x) throws SQLException {
    -        setParam(parameterIndex, x, Types.TINYINT);
    +        setObject(parameterIndex, x, Types.TINYINT);
         }
     
         @Override
         public void setShort(int parameterIndex, short x) throws SQLException {
    -        setParam(parameterIndex, x, Types.SMALLINT);
    +        setObject(parameterIndex, x, Types.SMALLINT);
         }
     
         @Override
         public void setInt(int parameterIndex, int x) throws SQLException {
    -        setParam(parameterIndex, x, Types.INTEGER);
    +        setObject(parameterIndex, x, Types.INTEGER);
         }
     
         @Override
         public void setLong(int parameterIndex, long x) throws SQLException {
    -        setParam(parameterIndex, x, Types.BIGINT);
    +        setObject(parameterIndex, x, Types.BIGINT);
         }
     
         @Override
         public void setFloat(int parameterIndex, float x) throws SQLException {
    -        setParam(parameterIndex, x, Types.REAL);
    +        setObject(parameterIndex, x, Types.REAL);
         }
     
         @Override
         public void setDouble(int parameterIndex, double x) throws SQLException {
    -        setParam(parameterIndex, x, Types.DOUBLE);
    +        setObject(parameterIndex, x, Types.DOUBLE);
         }
     
         @Override
         public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException {
    -        throw new SQLFeatureNotSupportedException("BigDecimal not supported");
    +        setObject(parameterIndex, x, Types.BIGINT);
         }
     
         @Override
         public void setString(int parameterIndex, String x) throws SQLException {
    -        setParam(parameterIndex, x, Types.VARCHAR);
    +        setObject(parameterIndex, x, Types.VARCHAR);
         }
     
         @Override
         public void setBytes(int parameterIndex, byte[] x) throws SQLException {
    -        throw new UnsupportedOperationException("Bytes not implemented yet");
    +        setObject(parameterIndex, x, Types.VARBINARY);
         }
     
         @Override
         public void setDate(int parameterIndex, Date x) throws SQLException {
    -        throw new UnsupportedOperationException("Date/Time not implemented yet");
    +        setObject(parameterIndex, x, Types.TIMESTAMP);
         }
     
         @Override
         public void setTime(int parameterIndex, Time x) throws SQLException {
    -        throw new UnsupportedOperationException("Date/Time not implemented yet");
    +        setObject(parameterIndex, x, Types.TIMESTAMP);
         }
     
         @Override
         public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException {
    -        throw new UnsupportedOperationException("Date/Time not implemented yet");
    +        setObject(parameterIndex, x, Types.TIMESTAMP);
         }
     
         @Override
    @@ -161,12 +174,22 @@ public void clearParameters() throws SQLException {
     
         @Override
         public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException {
    -        throw new UnsupportedOperationException("Object not implemented yet");
    +        // the value of scaleOrLength parameter doesn't matter, as it's not used in the called method below
    +        setObject(parameterIndex, x, targetSqlType, 0);
         }
     
         @Override
         public void setObject(int parameterIndex, Object x) throws SQLException {
    -        throw new SQLFeatureNotSupportedException("CharacterStream not supported");
    +        if (x == null) {
    +            setParam(parameterIndex, null, Types.NULL);
    +            return;
    +        }
    +        
    +        // check also here the unsupported types so that any unsupported interfaces ({@code java.sql.Struct},
    +        // {@code java.sql.Array} etc) will generate the correct exception message. Otherwise, the method call
    +        // {@code TypeConverter.fromJavaToJDBC(x.getClass())} will report the implementing class as not being supported.
    +        checkKnownUnsupportedTypes(x);
    +        setObject(parameterIndex, x, TypeConverter.fromJavaToJDBC(x.getClass()).getVendorTypeNumber(), 0);
         }
     
         @Override
    @@ -181,22 +204,22 @@ public void setCharacterStream(int parameterIndex, Reader reader, int length) th
     
         @Override
         public void setRef(int parameterIndex, Ref x) throws SQLException {
    -        throw new SQLFeatureNotSupportedException("Ref not supported");
    +        setObject(parameterIndex, x);
         }
     
         @Override
         public void setBlob(int parameterIndex, Blob x) throws SQLException {
    -        throw new SQLFeatureNotSupportedException("Blob not supported");
    +        setObject(parameterIndex, x);
         }
     
         @Override
         public void setClob(int parameterIndex, Clob x) throws SQLException {
    -        throw new SQLFeatureNotSupportedException("Clob not supported");
    +        setObject(parameterIndex, x);
         }
     
         @Override
         public void setArray(int parameterIndex, Array x) throws SQLException {
    -        throw new SQLFeatureNotSupportedException("Array not supported");
    +        setObject(parameterIndex, x);
         }
     
         @Override
    @@ -206,17 +229,44 @@ public ResultSetMetaData getMetaData() throws SQLException {
     
         @Override
         public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException {
    -        throw new UnsupportedOperationException("Dates not implemented yet");
    +        if (cal == null) {
    +            setObject(parameterIndex, x, Types.TIMESTAMP);
    +            return;
    +        }
    +        if (x == null) {
    +            setNull(parameterIndex, Types.TIMESTAMP);
    +            return;
    +        }
    +        // converting to UTC since this is what ES is storing internally
    +        setObject(parameterIndex, new Date(TypeConverter.convertFromCalendarToUTC(x.getTime(), cal)), Types.TIMESTAMP);
         }
     
         @Override
         public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException {
    -        throw new UnsupportedOperationException("Dates not implemented yet");
    +        if (cal == null) {
    +            setObject(parameterIndex, x, Types.TIMESTAMP);
    +            return;
    +        }
    +        if (x == null) {
    +            setNull(parameterIndex, Types.TIMESTAMP);
    +            return;
    +        }
    +        // converting to UTC since this is what ES is storing internally
    +        setObject(parameterIndex, new Time(TypeConverter.convertFromCalendarToUTC(x.getTime(), cal)), Types.TIMESTAMP);
         }
     
         @Override
         public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException {
    -        throw new UnsupportedOperationException("Dates not implemented yet");
    +        if (cal == null) {
    +            setObject(parameterIndex, x, Types.TIMESTAMP);
    +            return;
    +        }
    +        if (x == null) {
    +            setNull(parameterIndex, Types.TIMESTAMP);
    +            return;
    +        }
    +        // converting to UTC since this is what ES is storing internally
    +        setObject(parameterIndex, new Timestamp(TypeConverter.convertFromCalendarToUTC(x.getTime(), cal)), Types.TIMESTAMP);
         }
     
         @Override
    @@ -226,7 +276,7 @@ public void setNull(int parameterIndex, int sqlType, String typeName) throws SQL
     
         @Override
         public void setURL(int parameterIndex, URL x) throws SQLException {
    -        throw new SQLFeatureNotSupportedException("Datalink not supported");
    +        setObject(parameterIndex, x);
         }
     
         @Override
    @@ -236,7 +286,7 @@ public ParameterMetaData getParameterMetaData() throws SQLException {
     
         @Override
         public void setRowId(int parameterIndex, RowId x) throws SQLException {
    -        throw new SQLFeatureNotSupportedException("RowId not supported");
    +        setObject(parameterIndex, x);
         }
     
         @Override
    @@ -251,7 +301,7 @@ public void setNCharacterStream(int parameterIndex, Reader value, long length) t
     
         @Override
         public void setNClob(int parameterIndex, NClob value) throws SQLException {
    -        throw new SQLFeatureNotSupportedException("NClob not supported");
    +        setObject(parameterIndex, value);
         }
     
         @Override
    @@ -271,12 +321,108 @@ public void setNClob(int parameterIndex, Reader reader, long length) throws SQLE
     
         @Override
         public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
    -        throw new SQLFeatureNotSupportedException("SQLXML not supported");
    +        setObject(parameterIndex, xmlObject);
         }
    -
    +    
         @Override
         public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException {
    -        throw new UnsupportedOperationException("Object not implemented yet");
    +        checkOpen();
    +        
    +        JDBCType targetJDBCType;
    +        try {
    +            // this is also a way to check early for the validity of the desired sql type
    +            targetJDBCType = JDBCType.valueOf(targetSqlType);
    +        } catch (IllegalArgumentException e) {
    +            throw new SQLDataException(e.getMessage());
    +        }
    +        
    +        // set the null value on the type and exit
    +        if (x == null) {
    +            setParam(parameterIndex, null, targetSqlType);
    +            return;
    +        }
    +        
    +        checkKnownUnsupportedTypes(x);
    +        if (x instanceof byte[]) {
    +            if (targetJDBCType != JDBCType.VARBINARY) {
    +                throw new SQLFeatureNotSupportedException(
    +                        "Conversion from type byte[] to " + targetJDBCType + " not supported");
    +            }
    +            setParam(parameterIndex, x, Types.VARBINARY);
    +            return;
    +        }
    +        
    +        if (x instanceof Timestamp
    +                || x instanceof Calendar
    +                || x instanceof Date
    +                || x instanceof LocalDateTime
    +                || x instanceof Time
    +                || x instanceof java.util.Date) 
    +        {
    +            if (targetJDBCType == JDBCType.TIMESTAMP) {
    +                // converting to {@code java.util.Date} because this is the type supported by {@code XContentBuilder} for serialization
    +                java.util.Date dateToSet;
    +                if (x instanceof Timestamp) {
    +                    dateToSet = new java.util.Date(((Timestamp) x).getTime());
    +                } else if (x instanceof Calendar) {
    +                    dateToSet = ((Calendar) x).getTime();
    +                } else if (x instanceof Date) {
    +                    dateToSet = new java.util.Date(((Date) x).getTime());
    +                } else if (x instanceof LocalDateTime){
    +                    LocalDateTime ldt = (LocalDateTime) x;
    +                    Calendar cal = getDefaultCalendar();
    +                    cal.set(ldt.getYear(), ldt.getMonthValue() - 1, ldt.getDayOfMonth(), ldt.getHour(), ldt.getMinute(), ldt.getSecond());
    +                    
    +                    dateToSet = cal.getTime();
    +                } else if (x instanceof Time) {
    +                    dateToSet = new java.util.Date(((Time) x).getTime());
    +                } else {
    +                    dateToSet = (java.util.Date) x;
    +                }
    +
    +                setParam(parameterIndex, dateToSet, Types.TIMESTAMP);
    +                return;
    +            } else if (targetJDBCType == JDBCType.VARCHAR) {
    +                setParam(parameterIndex, String.valueOf(x), Types.VARCHAR);
    +                return;
    +            }
    +            // anything else other than VARCHAR and TIMESTAMP is not supported in this JDBC driver
    +            throw new SQLFeatureNotSupportedException(
    +                    "Conversion from type " + x.getClass().getName() + " to " + targetJDBCType + " not supported");
    +        }
    +        
    +        if (x instanceof Boolean
    +                || x instanceof Byte
    +                || x instanceof Short
    +                || x instanceof Integer
    +                || x instanceof Long
    +                || x instanceof Float
    +                || x instanceof Double
    +                || x instanceof String) {
    +            setParam(parameterIndex, 
    +                    TypeConverter.convert(x, TypeConverter.fromJavaToJDBC(x.getClass()), DataType.fromJdbcTypeToJava(targetJDBCType)), 
    +                    targetSqlType);
    +            return;
    +        }
    +        
    +        throw new SQLFeatureNotSupportedException(
    +                "Conversion from type " + x.getClass().getName() + " to " + targetJDBCType + " not supported");
    +    }
    +
    +    private void checkKnownUnsupportedTypes(Object x) throws SQLFeatureNotSupportedException {
     +        List<Class<?>> unsupportedTypes = new ArrayList<Class<?>>(Arrays.asList(Struct.class, Array.class, SQLXML.class,
    +                RowId.class, Ref.class, Blob.class, NClob.class, Clob.class, LocalDate.class, LocalTime.class, 
    +                OffsetTime.class, OffsetDateTime.class, URL.class, BigDecimal.class));
    +        
     +        for (Class<?> clazz:unsupportedTypes) {
    +           if (clazz.isAssignableFrom(x.getClass())) {
    +                throw new SQLFeatureNotSupportedException("Objects of type " + clazz.getName() + " are not supported");
    +           }
    +        }
    +    }
    +    
    +    private Calendar getDefaultCalendar() {
    +        return Calendar.getInstance(cfg.timeZone(), Locale.ROOT);
         }
     
         @Override
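
Taken together, the changes above make most setXxx methods funnel into setObject, add TIMESTAMP handling for the common date/time classes, and reject unsupported types explicitly. A usage sketch of the resulting behaviour; the table and column names are made up, and obtaining the Connection is out of scope here:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.Types;
    import java.util.Date;

    public class SetObjectUsageSketch {
        static void bind(Connection con) throws Exception {
            try (PreparedStatement ps = con.prepareStatement("SELECT * FROM events WHERE ts > ? AND name = ?")) {
                ps.setObject(1, new Date(), Types.TIMESTAMP); // java.util.Date is now accepted for TIMESTAMP
                ps.setObject(2, "warning");                   // JDBC type inferred from the Java class
                ps.executeQuery();
            }
        }
    }

Types listed in checkKnownUnsupportedTypes (Struct, Blob, Clob, LocalDate, BigDecimal and friends) are rejected with SQLFeatureNotSupportedException rather than being silently mis-converted.
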
    diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java
    index c92ac9c5ac91c..351ac73a88f28 100644
    --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java
    +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java
     @@ -359,14 +359,6 @@ private <T> T convert(int columnIndex, Class<T> type) throws SQLException {
                 return null;
             }
     
    -        if (type != null && type.isInstance(val)) {
    -            try {
    -                return type.cast(val);
    -            } catch (ClassCastException cce) {
    -                throw new SQLException("unable to convert column " + columnIndex + " to " + type, cce);
    -            }
    -        }
    -
             JDBCType columnType = cursor.columns().get(columnIndex - 1).type;
             
             return TypeConverter.convert(val, columnType, type);
    diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java
    index a1fa04ef1afd0..1e24a03c8b31c 100644
    --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java
    +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java
    @@ -10,7 +10,9 @@
     
     import java.sql.Date;
     import java.sql.JDBCType;
    +import java.sql.SQLDataException;
     import java.sql.SQLException;
    +import java.sql.SQLFeatureNotSupportedException;
     import java.sql.Time;
     import java.sql.Timestamp;
     import java.time.LocalDate;
    @@ -18,10 +20,17 @@
     import java.time.LocalTime;
     import java.time.OffsetDateTime;
     import java.time.OffsetTime;
    +import java.time.ZoneOffset;
    +import java.time.ZonedDateTime;
    +import java.util.Arrays;
     import java.util.Calendar;
    +import java.util.Collections;
     import java.util.GregorianCalendar;
     import java.util.Locale;
    +import java.util.Map;
    +import java.util.Map.Entry;
     import java.util.function.Function;
    +import java.util.stream.Collectors;
     
     import static java.lang.String.format;
     import static java.util.Calendar.DAY_OF_MONTH;
    @@ -48,6 +57,22 @@ private TypeConverter() {
         }
     
         private static final long DAY_IN_MILLIS = 60 * 60 * 24;
     +    private static final Map<Class<?>, JDBCType> javaToJDBC;
    +    
    +    static {
     +        Map<Class<?>, JDBCType> aMap = Arrays.stream(DataType.values())
    +                .filter(dataType -> dataType.javaClass() != null 
    +                        && dataType != DataType.HALF_FLOAT 
    +                        && dataType != DataType.SCALED_FLOAT 
    +                        && dataType != DataType.TEXT)
    +                .collect(Collectors.toMap(dataType -> dataType.javaClass(), dataType -> dataType.jdbcType));
    +        // apart from the mappings in {@code DataType} three more Java classes can be mapped to a {@code JDBCType.TIMESTAMP}
    +        // according to B-4 table from the jdbc4.2 spec
    +        aMap.put(Calendar.class, JDBCType.TIMESTAMP);
    +        aMap.put(java.util.Date.class, JDBCType.TIMESTAMP);
    +        aMap.put(LocalDateTime.class, JDBCType.TIMESTAMP);
    +        javaToJDBC = Collections.unmodifiableMap(aMap);
    +    }
     
         /**
          * Converts millisecond after epoc to date
     @@ -94,6 +119,20 @@ private static <T> T dateTimeConvert(Long millis, Calendar c, Function<Calendar, T> creator) {
          static <T> T convert(Object val, JDBCType columnType, Class<T> type) throws SQLException {
             if (type == null) {
                 return (T) convert(val, columnType);
             }
    +        
    +        if (type.isInstance(val)) {
    +            try {
    +                return type.cast(val);
    +            } catch (ClassCastException cce) {
    +                throw new SQLDataException("Unable to convert " + val.getClass().getName() + " to " + columnType, cce);
    +            }
    +        }
    +        
             if (type == String.class) {
                 return (T) asString(convert(val, columnType));
             }
    @@ -174,10 +222,10 @@ public static String classNameOf(JDBCType jdbcType) throws JdbcSQLException {
                 // Convert unsupported exception to JdbcSQLException
                 throw new JdbcSQLException(ex, ex.getMessage());
             }
    -        if (dataType.javaName == null) {
    +        if (dataType.javaClass() == null) {
                 throw new JdbcSQLException("Unsupported JDBC type [" + jdbcType + "]");
             }
    -        return dataType.javaName;
    +        return dataType.javaClass().getName();
         }
     
         /**
    @@ -228,6 +276,18 @@ static boolean isSigned(JDBCType jdbcType) throws SQLException {
             }
             return dataType.isSigned();
         }
    +    
    +    
     +    static JDBCType fromJavaToJDBC(Class<?> clazz) throws SQLException {
     +        for (Entry<Class<?>, JDBCType> e : javaToJDBC.entrySet()) {
    +            // java.util.Calendar from {@code javaToJDBC} is an abstract class and this method can be used with concrete classes as well
    +            if (e.getKey().isAssignableFrom(clazz)) {
    +                return e.getValue();
    +            }
    +        }
    +        
    +        throw new SQLFeatureNotSupportedException("Objects of type " + clazz.getName() + " are not supported");
    +    }
     
         private static Double doubleValue(Object v) {
             if (v instanceof String) {
    @@ -275,7 +335,7 @@ private static Boolean asBoolean(Object val, JDBCType columnType) throws SQLExce
                 case REAL:
                 case FLOAT:
                 case DOUBLE:
    -                return Boolean.valueOf(Integer.signum(((Number) val).intValue()) == 0);
    +                return Boolean.valueOf(Integer.signum(((Number) val).intValue()) != 0);
                 default:
                     throw new SQLException("Conversion from type [" + columnType + "] to [Boolean] not supported");
     
    @@ -454,28 +514,28 @@ private static long utcMillisRemoveDate(long l) {
     
         private static byte safeToByte(long x) throws SQLException {
             if (x > Byte.MAX_VALUE || x < Byte.MIN_VALUE) {
    -            throw new SQLException(format(Locale.ROOT, "Numeric %d out of range", Long.toString(x)));
    +            throw new SQLException(format(Locale.ROOT, "Numeric %s out of range", Long.toString(x)));
             }
             return (byte) x;
         }
     
         private static short safeToShort(long x) throws SQLException {
             if (x > Short.MAX_VALUE || x < Short.MIN_VALUE) {
    -            throw new SQLException(format(Locale.ROOT, "Numeric %d out of range", Long.toString(x)));
    +            throw new SQLException(format(Locale.ROOT, "Numeric %s out of range", Long.toString(x)));
             }
             return (short) x;
         }
     
         private static int safeToInt(long x) throws SQLException {
             if (x > Integer.MAX_VALUE || x < Integer.MIN_VALUE) {
    -            throw new SQLException(format(Locale.ROOT, "Numeric %d out of range", Long.toString(x)));
    +            throw new SQLException(format(Locale.ROOT, "Numeric %s out of range", Long.toString(x)));
             }
             return (int) x;
         }
     
         private static long safeToLong(double x) throws SQLException {
             if (x > Long.MAX_VALUE || x < Long.MIN_VALUE) {
    -            throw new SQLException(format(Locale.ROOT, "Numeric %d out of range", Double.toString(x)));
    +            throw new SQLException(format(Locale.ROOT, "Numeric %s out of range", Double.toString(x)));
             }
             return Math.round(x);
         }
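
The new javaToJDBC map and fromJavaToJDBC lookup above implement a Java-class-to-JDBCType mapping in the spirit of table B-4 of the JDBC 4.2 spec, using isAssignableFrom so that subclasses still match. A tiny self-contained sketch of that lookup idea (a few hand-picked entries, not the real DataType-derived map):

    import java.sql.JDBCType;
    import java.sql.SQLFeatureNotSupportedException;
    import java.util.Calendar;
    import java.util.Date;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class JavaToJdbcSketch {
        private static final Map<Class<?>, JDBCType> MAPPING = new LinkedHashMap<>();
        static {
            MAPPING.put(String.class, JDBCType.VARCHAR);
            MAPPING.put(Long.class, JDBCType.BIGINT);
            MAPPING.put(Date.class, JDBCType.TIMESTAMP);
            MAPPING.put(Calendar.class, JDBCType.TIMESTAMP);
        }

        static JDBCType lookup(Class<?> clazz) throws SQLFeatureNotSupportedException {
            for (Map.Entry<Class<?>, JDBCType> e : MAPPING.entrySet()) {
                if (e.getKey().isAssignableFrom(clazz)) {  // concrete subclasses (e.g. GregorianCalendar) match too
                    return e.getValue();
                }
            }
            throw new SQLFeatureNotSupportedException("Objects of type " + clazz.getName() + " are not supported");
        }

        public static void main(String[] args) throws Exception {
            System.out.println(lookup(java.util.GregorianCalendar.class)); // TIMESTAMP
        }
    }
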
    diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbcx/JdbcDataSource.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbcx/JdbcDataSource.java
    index 595cd893ebc76..bd2776e133503 100644
    --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbcx/JdbcDataSource.java
    +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbcx/JdbcDataSource.java
    @@ -5,11 +5,11 @@
      */
     package org.elasticsearch.xpack.sql.jdbc.jdbcx;
     
    -import org.elasticsearch.xpack.sql.client.shared.ConnectionConfiguration;
    +import org.elasticsearch.xpack.sql.client.ConnectionConfiguration;
     import org.elasticsearch.xpack.sql.jdbc.debug.Debug;
     import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
     import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConnection;
    -import org.elasticsearch.xpack.sql.client.shared.Version;
    +import org.elasticsearch.xpack.sql.client.Version;
     
     import java.io.PrintWriter;
     import java.sql.Connection;
     @@ -117,4 +117,4 @@ public <T> T unwrap(Class<T> iface) throws SQLException {
             }
             throw new SQLException();
         }
    -}
    \ No newline at end of file
    +}
    diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/client/JdbcHttpClient.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/client/JdbcHttpClient.java
    index 17afc34efffe6..e32aadcd7fa2b 100644
    --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/client/JdbcHttpClient.java
    +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/net/client/JdbcHttpClient.java
    @@ -8,7 +8,7 @@
     import org.elasticsearch.common.collect.Tuple;
     import org.elasticsearch.common.unit.TimeValue;
     import org.elasticsearch.xpack.sql.client.HttpClient;
    -import org.elasticsearch.xpack.sql.client.shared.Version;
    +import org.elasticsearch.xpack.sql.client.Version;
     import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
     import org.elasticsearch.xpack.sql.jdbc.net.protocol.ColumnInfo;
     import org.elasticsearch.xpack.sql.jdbc.net.protocol.InfoResponse;
    @@ -23,7 +23,7 @@
     import java.util.List;
     import java.util.stream.Collectors;
     
    -import static org.elasticsearch.xpack.sql.client.shared.StringUtils.EMPTY;
    +import static org.elasticsearch.xpack.sql.client.StringUtils.EMPTY;
     
     /**
      * JDBC specific HTTP client.
    diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfigurationTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfigurationTests.java
    index e15d011e387e3..c7f2b50ace019 100644
    --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfigurationTests.java
    +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfigurationTests.java
    @@ -11,8 +11,8 @@
     import java.sql.SQLException;
     import java.util.Properties;
     
    -import static org.elasticsearch.xpack.sql.client.shared.ConnectionConfiguration.CONNECT_TIMEOUT;
    -import static org.elasticsearch.xpack.sql.client.shared.ConnectionConfiguration.PAGE_TIMEOUT;
    +import static org.elasticsearch.xpack.sql.client.ConnectionConfiguration.CONNECT_TIMEOUT;
    +import static org.elasticsearch.xpack.sql.client.ConnectionConfiguration.PAGE_TIMEOUT;
     import static org.hamcrest.Matchers.equalTo;
     import static org.hamcrest.Matchers.is;
     
    diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionTests.java
    index 82cd623f11599..03986e595442a 100644
    --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionTests.java
    +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/VersionTests.java
    @@ -6,7 +6,7 @@
     package org.elasticsearch.xpack.sql.jdbc;
     
     import org.elasticsearch.test.ESTestCase;
    -import org.elasticsearch.xpack.sql.client.shared.Version;
    +import org.elasticsearch.xpack.sql.client.Version;
     
     public class VersionTests extends ESTestCase {
         public void testVersionIsCurrent() {
    diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java
    new file mode 100644
    index 0000000000000..ad96825896e1a
    --- /dev/null
    +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java
    @@ -0,0 +1,582 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License;
    + * you may not use this file except in compliance with the Elastic License.
    + */
    +package org.elasticsearch.xpack.sql.jdbc.jdbc;
    +
    +import org.elasticsearch.test.ESTestCase;
    +
    +import java.net.URL;
    +import java.nio.charset.StandardCharsets;
    +import java.sql.JDBCType;
    +import java.sql.SQLException;
    +import java.sql.SQLFeatureNotSupportedException;
    +import java.sql.Struct;
    +import java.sql.Time;
    +import java.sql.Timestamp;
    +import java.sql.Types;
    +import java.time.Clock;
    +import java.time.LocalDateTime;
    +import java.time.ZoneOffset;
    +import java.time.ZonedDateTime;
    +import java.util.Calendar;
    +import java.util.Date;
    +import java.util.Locale;
    +import java.util.Map;
    +
    +import static java.sql.JDBCType.BIGINT;
    +import static java.sql.JDBCType.BOOLEAN;
    +import static java.sql.JDBCType.DOUBLE;
    +import static java.sql.JDBCType.FLOAT;
    +import static java.sql.JDBCType.INTEGER;
    +import static java.sql.JDBCType.REAL;
    +import static java.sql.JDBCType.SMALLINT;
    +import static java.sql.JDBCType.TIMESTAMP;
    +import static java.sql.JDBCType.TINYINT;
    +import static java.sql.JDBCType.VARBINARY;
    +import static java.sql.JDBCType.VARCHAR;
    +
    +public class JdbcPreparedStatementTests extends ESTestCase {
    +    
    +    public void testSettingBooleanValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        jps.setBoolean(1, true);
    +        assertEquals(true, value(jps));
    +        assertEquals(BOOLEAN, jdbcType(jps));
    +        
    +        jps.setObject(1, false);
    +        assertEquals(false, value(jps));
    +        assertEquals(BOOLEAN, jdbcType(jps));
    +        
    +        jps.setObject(1, true, Types.BOOLEAN);
    +        assertEquals(true, value(jps));
    +        assertEquals(BOOLEAN, jdbcType(jps));
    +        assertTrue(value(jps) instanceof Boolean);
    +        
    +        jps.setObject(1, true, Types.INTEGER);
    +        assertEquals(1, value(jps));
    +        assertEquals(INTEGER, jdbcType(jps));
    +        
    +        jps.setObject(1, true, Types.VARCHAR);
    +        assertEquals("true", value(jps));
    +        assertEquals(VARCHAR, jdbcType(jps));
    +    }
    +
    +    public void testThrownExceptionsWhenSettingBooleanValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, true, Types.TIMESTAMP));
    +        assertEquals("Conversion from type [BOOLEAN] to [Timestamp] not supported", sqle.getMessage());
    +    }
    +    
    +    public void testSettingStringValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        jps.setString(1, "foo bar");
    +        assertEquals("foo bar", value(jps));
    +        assertEquals(VARCHAR, jdbcType(jps));
    +        
    +        jps.setObject(1, "foo bar");
    +        assertEquals("foo bar", value(jps));
    +        assertEquals(VARCHAR, jdbcType(jps));
    +        
    +        jps.setObject(1, "foo bar", Types.VARCHAR);
    +        assertEquals("foo bar", value(jps));
    +        assertEquals(VARCHAR, jdbcType(jps));
    +        assertTrue(value(jps) instanceof String);
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingStringValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, "foo bar", Types.INTEGER));
    +        assertEquals("Conversion from type [VARCHAR] to [Integer] not supported", sqle.getMessage());
    +    }
    +    
    +    public void testSettingByteTypeValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        jps.setByte(1, (byte) 6);
    +        assertEquals((byte) 6, value(jps));
    +        assertEquals(TINYINT, jdbcType(jps));
    +        
    +        jps.setObject(1, (byte) 6);
    +        assertEquals((byte) 6, value(jps));
    +        assertEquals(TINYINT, jdbcType(jps));
    +        assertTrue(value(jps) instanceof Byte);
    +        
    +        jps.setObject(1, (byte) 0, Types.BOOLEAN);
    +        assertEquals(false, value(jps));
    +        assertEquals(BOOLEAN, jdbcType(jps));
    +        
    +        jps.setObject(1, (byte) 123, Types.BOOLEAN);
    +        assertEquals(true, value(jps));
    +        assertEquals(BOOLEAN, jdbcType(jps));
    +        
    +        jps.setObject(1, (byte) 123, Types.INTEGER);
    +        assertEquals(123, value(jps));
    +        assertEquals(INTEGER, jdbcType(jps));
    +        
    +        jps.setObject(1, (byte) -128, Types.DOUBLE);
    +        assertEquals(-128.0, value(jps));
    +        assertEquals(DOUBLE, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingByteTypeValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, (byte) 6, Types.TIMESTAMP));
    +        assertEquals("Conversion from type [TINYINT] to [Timestamp] not supported", sqle.getMessage());
    +    }
    +    
    +    public void testSettingShortTypeValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        short someShort = randomShort();
    +        jps.setShort(1, someShort);
    +        assertEquals(someShort, value(jps));
    +        assertEquals(SMALLINT, jdbcType(jps));
    +        
    +        jps.setObject(1, someShort);
    +        assertEquals(someShort, value(jps));
    +        assertEquals(SMALLINT, jdbcType(jps));
    +        assertTrue(value(jps) instanceof Short);
    +        
    +        jps.setObject(1, (short) 1, Types.BOOLEAN);
    +        assertEquals(true, value(jps));
    +        assertEquals(BOOLEAN, jdbcType(jps));
    +        
    +        jps.setObject(1, (short) -32700, Types.DOUBLE);
    +        assertEquals(-32700.0, value(jps));
    +        assertEquals(DOUBLE, jdbcType(jps));
    +        
    +        jps.setObject(1, someShort, Types.INTEGER);
    +        assertEquals((int) someShort, value(jps));
    +        assertEquals(INTEGER, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingShortTypeValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, (short) 6, Types.TIMESTAMP));
    +        assertEquals("Conversion from type [SMALLINT] to [Timestamp] not supported", sqle.getMessage());
    +        
    +        sqle = expectThrows(SQLException.class, () -> jps.setObject(1, 256, Types.TINYINT));
    +        assertEquals("Numeric " + 256 + " out of range", sqle.getMessage());
    +    }
    +    
    +    public void testSettingIntegerValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        int someInt = randomInt();
    +        jps.setInt(1, someInt);
    +        assertEquals(someInt, value(jps));
    +        assertEquals(INTEGER, jdbcType(jps));
    +        
    +        jps.setObject(1, someInt);
    +        assertEquals(someInt, value(jps));
    +        assertEquals(INTEGER, jdbcType(jps));
    +        assertTrue(value(jps) instanceof Integer);
    +        
    +        jps.setObject(1, someInt, Types.VARCHAR);
    +        assertEquals(String.valueOf(someInt), value(jps));
    +        assertEquals(VARCHAR, jdbcType(jps));
    +        
    +        jps.setObject(1, someInt, Types.FLOAT);
    +        assertEquals(Double.valueOf(someInt), value(jps));
    +        assertTrue(value(jps) instanceof Double);
    +        assertEquals(FLOAT, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingIntegerValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        int someInt = randomInt();
    +        
    +        SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someInt, Types.TIMESTAMP));
    +        assertEquals("Conversion from type [INTEGER] to [Timestamp] not supported", sqle.getMessage());
    +        
    +        Integer randomIntNotShort = randomIntBetween(32768, Integer.MAX_VALUE);
    +        sqle = expectThrows(SQLException.class, () -> jps.setObject(1, randomIntNotShort, Types.SMALLINT));
    +        assertEquals("Numeric " + randomIntNotShort + " out of range", sqle.getMessage());
    +        
    +        sqle = expectThrows(SQLException.class, () -> jps.setObject(1, randomIntNotShort, Types.TINYINT));
    +        assertEquals("Numeric " + randomIntNotShort + " out of range", sqle.getMessage());
    +    }
    +    
    +    public void testSettingLongValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        long someLong = randomLong();
    +        jps.setLong(1, someLong);
    +        assertEquals(someLong, value(jps));
    +        assertEquals(BIGINT, jdbcType(jps));
    +        
    +        jps.setObject(1, someLong);
    +        assertEquals(someLong, value(jps));
    +        assertEquals(BIGINT, jdbcType(jps));
    +        assertTrue(value(jps) instanceof Long);
    +        
    +        jps.setObject(1, someLong, Types.VARCHAR);
    +        assertEquals(String.valueOf(someLong), value(jps));
    +        assertEquals(VARCHAR, jdbcType(jps));
    +        
    +        jps.setObject(1, someLong, Types.DOUBLE);
    +        assertEquals((double) someLong, value(jps));
    +        assertEquals(DOUBLE, jdbcType(jps));
    +        
    +        jps.setObject(1, someLong, Types.FLOAT);
    +        assertEquals((double) someLong, value(jps));
    +        assertEquals(FLOAT, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingLongValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        long someLong = randomLong();
    +        
    +        SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someLong, Types.TIMESTAMP));
    +        assertEquals("Conversion from type [BIGINT] to [Timestamp] not supported", sqle.getMessage());
    +        
    +        Long randomLongNotShort = randomLongBetween(Integer.MAX_VALUE + 1L, Long.MAX_VALUE);
    +        sqle = expectThrows(SQLException.class, () -> jps.setObject(1, randomLongNotShort, Types.INTEGER));
    +        assertEquals("Numeric " + randomLongNotShort + " out of range", sqle.getMessage());
    +        
    +        sqle = expectThrows(SQLException.class, () -> jps.setObject(1, randomLongNotShort, Types.SMALLINT));
    +        assertEquals("Numeric " + randomLongNotShort + " out of range", sqle.getMessage());
    +    }
    +    
    +    public void testSettingFloatValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        float someFloat = randomFloat();
    +        jps.setFloat(1, someFloat);
    +        assertEquals(someFloat, value(jps));
    +        assertEquals(REAL, jdbcType(jps));
    +        
    +        jps.setObject(1, someFloat);
    +        assertEquals(someFloat, value(jps));
    +        assertEquals(REAL, jdbcType(jps));
    +        assertTrue(value(jps) instanceof Float);
    +        
    +        jps.setObject(1, someFloat, Types.VARCHAR);
    +        assertEquals(String.valueOf(someFloat), value(jps));
    +        assertEquals(VARCHAR, jdbcType(jps));
    +        
    +        jps.setObject(1, someFloat, Types.DOUBLE);
    +        assertEquals((double) someFloat, value(jps));
    +        assertEquals(DOUBLE, jdbcType(jps));
    +        
    +        jps.setObject(1, someFloat, Types.FLOAT);
    +        assertEquals((double) someFloat, value(jps));
    +        assertEquals(FLOAT, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingFloatValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        float someFloat = randomFloat();
    +        
    +        SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someFloat, Types.TIMESTAMP));
    +        assertEquals("Conversion from type [REAL] to [Timestamp] not supported", sqle.getMessage());
    +        
    +        Float floatNotInt =  5_155_000_000f;
    +        sqle = expectThrows(SQLException.class, () -> jps.setObject(1, floatNotInt, Types.INTEGER));
    +        assertEquals(String.format(Locale.ROOT, "Numeric %s out of range", 
    +                Long.toString(Math.round(floatNotInt.doubleValue()))), sqle.getMessage());
    +        
    +        sqle = expectThrows(SQLException.class, () -> jps.setObject(1, floatNotInt, Types.SMALLINT));
    +        assertEquals(String.format(Locale.ROOT, "Numeric %s out of range", 
    +                Long.toString(Math.round(floatNotInt.doubleValue()))), sqle.getMessage());
    +    }
    +    
    +    public void testSettingDoubleValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        double someDouble = randomDouble();
    +        jps.setDouble(1, someDouble);
    +        assertEquals(someDouble, value(jps));
    +        assertEquals(DOUBLE, jdbcType(jps));
    +        
    +        jps.setObject(1, someDouble);
    +        assertEquals(someDouble, value(jps));
    +        assertEquals(DOUBLE, jdbcType(jps));
    +        assertTrue(value(jps) instanceof Double);
    +        
    +        jps.setObject(1, someDouble, Types.VARCHAR);
    +        assertEquals(String.valueOf(someDouble), value(jps));
    +        assertEquals(VARCHAR, jdbcType(jps));
    +        
    +        jps.setObject(1, someDouble, Types.REAL);
    +        assertEquals(new Float(someDouble), value(jps));
    +        assertEquals(REAL, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingDoubleValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        double someDouble = randomDouble();
    +        
    +        SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someDouble, Types.TIMESTAMP));
    +        assertEquals("Conversion from type [DOUBLE] to [Timestamp] not supported", sqle.getMessage());
    +        
    +        Double doubleNotInt = 5_155_000_000d;
    +        sqle = expectThrows(SQLException.class, () -> jps.setObject(1, doubleNotInt, Types.INTEGER));
    +        assertEquals(String.format(Locale.ROOT, "Numeric %s out of range", 
    +                Long.toString(((Number) doubleNotInt).longValue())), sqle.getMessage());
    +    }
    +    
    +    public void testUnsupportedClasses() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        SQLFeatureNotSupportedException sfnse = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, new Struct() {
    +            @Override
    +            public String getSQLTypeName() throws SQLException {
    +                return null;
    +            }
    +            @Override
    +            public Object[] getAttributes(Map<String, Class<?>> map) throws SQLException {
    +                return null;
    +            }
    +            @Override
    +            public Object[] getAttributes() throws SQLException {
    +                return null;
    +            }
    +        }));
    +        assertEquals("Objects of type java.sql.Struct are not supported", sfnse.getMessage());
    +        
    +        sfnse = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, new URL("http://test")));
    +        assertEquals("Objects of type java.net.URL are not supported", sfnse.getMessage());
    +        
    +        sfnse = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setURL(1, new URL("http://test")));
    +        assertEquals("Objects of type java.net.URL are not supported", sfnse.getMessage());
    +        
    +        sfnse = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, this, Types.TIMESTAMP));
    +        assertEquals("Conversion from type " + this.getClass().getName() + " to TIMESTAMP not supported", sfnse.getMessage());
    +        
    +        SQLException se = expectThrows(SQLException.class, () -> jps.setObject(1, this, 1_000_000));
    +        assertEquals("Type:1000000 is not a valid Types.java value.", se.getMessage());
    +        
    +        IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> jps.setObject(1, randomShort(), Types.CHAR));
    +        assertEquals("Unsupported JDBC type [CHAR]", iae.getMessage());
    +    }
    +    
    +    public void testSettingTimestampValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +
    +        Timestamp someTimestamp = new Timestamp(randomMillisSinceEpoch());
    +        jps.setTimestamp(1, someTimestamp);
    +        assertEquals(someTimestamp.getTime(), ((Date)value(jps)).getTime());
    +        assertEquals(TIMESTAMP, jdbcType(jps));
    +        
    +        Calendar nonDefaultCal = randomCalendar();
    +        // February 29th, 2016. 01:17:55 GMT = 1456708675000 millis since epoch
    +        jps.setTimestamp(1, new Timestamp(1456708675000L), nonDefaultCal);
    +        assertEquals(1456708675000L, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal));
    +        assertEquals(TIMESTAMP, jdbcType(jps));
    +        
    +        long beforeEpochTime = -randomMillisSinceEpoch();
    +        jps.setTimestamp(1, new Timestamp(beforeEpochTime), nonDefaultCal);
    +        assertEquals(beforeEpochTime, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal));
    +        assertTrue(value(jps) instanceof java.util.Date);
    +        
    +        jps.setObject(1, someTimestamp, Types.VARCHAR);
    +        assertEquals(someTimestamp.toString(), value(jps).toString());
    +        assertEquals(VARCHAR, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingTimestampValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        Timestamp someTimestamp = new Timestamp(randomMillisSinceEpoch());
    +        
    +        SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, someTimestamp, Types.INTEGER));
    +        assertEquals("Conversion from type java.sql.Timestamp to INTEGER not supported", sqle.getMessage());
    +    }
    +
    +    public void testSettingTimeValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        Time time = new Time(4675000);
    +        Calendar nonDefaultCal = randomCalendar();
    +        jps.setTime(1, time, nonDefaultCal);
    +        assertEquals(4675000, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal));
    +        assertEquals(TIMESTAMP, jdbcType(jps));
    +        assertTrue(value(jps) instanceof java.util.Date);
    +        
    +        jps.setObject(1, time, Types.VARCHAR);
    +        assertEquals(time.toString(), value(jps).toString());
    +        assertEquals(VARCHAR, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingTimeValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        Time time = new Time(4675000);
    +        
    +        SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, time, Types.INTEGER));
    +        assertEquals("Conversion from type java.sql.Time to INTEGER not supported", sqle.getMessage());
    +    }
    +    
    +    public void testSettingSqlDateValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        java.sql.Date someSqlDate = new java.sql.Date(randomMillisSinceEpoch());
    +        jps.setDate(1, someSqlDate);
    +        assertEquals(someSqlDate.getTime(), ((Date)value(jps)).getTime());
    +        assertEquals(TIMESTAMP, jdbcType(jps));
    +        
    +        someSqlDate = new java.sql.Date(randomMillisSinceEpoch());
    +        Calendar nonDefaultCal = randomCalendar();
    +        jps.setDate(1, someSqlDate, nonDefaultCal);
    +        assertEquals(someSqlDate.getTime(), convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal));
    +        assertEquals(TIMESTAMP, jdbcType(jps));
    +        assertTrue(value(jps) instanceof java.util.Date);
    +        
    +        jps.setObject(1, someSqlDate, Types.VARCHAR);
    +        assertEquals(someSqlDate.toString(), value(jps).toString());
    +        assertEquals(VARCHAR, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingSqlDateValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        java.sql.Date someSqlDate = new java.sql.Date(randomMillisSinceEpoch());
    +        
    +        SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, 
    +                () -> jps.setObject(1, new java.sql.Date(randomMillisSinceEpoch()), Types.DOUBLE));
    +        assertEquals("Conversion from type " + someSqlDate.getClass().getName() + " to DOUBLE not supported", sqle.getMessage());
    +    }
    +    
    +    public void testSettingCalendarValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        Calendar someCalendar = randomCalendar();
    +        someCalendar.setTimeInMillis(randomMillisSinceEpoch());
    +        
    +        jps.setObject(1, someCalendar);
    +        assertEquals(someCalendar.getTime(), (Date) value(jps));
    +        assertEquals(TIMESTAMP, jdbcType(jps));
    +        assertTrue(value(jps) instanceof java.util.Date);
    +        
    +        jps.setObject(1, someCalendar, Types.VARCHAR);
    +        assertEquals(someCalendar.toString(), value(jps).toString());
    +        assertEquals(VARCHAR, jdbcType(jps));
    +        
    +        Calendar nonDefaultCal = randomCalendar();
    +        jps.setObject(1, nonDefaultCal);
    +        assertEquals(nonDefaultCal.getTime(), (Date) value(jps));
    +        assertEquals(TIMESTAMP, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingCalendarValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        Calendar someCalendar = randomCalendar();
    +        
    +        SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, someCalendar, Types.DOUBLE));
    +        assertEquals("Conversion from type " + someCalendar.getClass().getName() + " to DOUBLE not supported", sqle.getMessage());
    +    }
    +    
    +    public void testSettingDateValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        Date someDate = new Date(randomMillisSinceEpoch());
    +        
    +        jps.setObject(1, someDate);
    +        assertEquals(someDate, (Date) value(jps));
    +        assertEquals(TIMESTAMP, jdbcType(jps));
    +        assertTrue(value(jps) instanceof java.util.Date);
    +        
    +        jps.setObject(1, someDate, Types.VARCHAR);
    +        assertEquals(someDate.toString(), value(jps).toString());
    +        assertEquals(VARCHAR, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingDateValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        Date someDate = new Date(randomMillisSinceEpoch());
    +        
    +        SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, someDate, Types.BIGINT));
    +        assertEquals("Conversion from type " + someDate.getClass().getName() + " to BIGINT not supported", sqle.getMessage());
    +    }
    +    
    +    public void testSettingLocalDateTimeValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        LocalDateTime ldt = LocalDateTime.now(Clock.systemDefaultZone());
    +        
    +        jps.setObject(1, ldt);
    +        assertEquals(Date.class, value(jps).getClass());
    +        assertEquals(TIMESTAMP, jdbcType(jps));
    +        assertTrue(value(jps) instanceof java.util.Date);
    +        
    +        jps.setObject(1, ldt, Types.VARCHAR);
    +        assertEquals(ldt.toString(), value(jps).toString());
    +        assertEquals(VARCHAR, jdbcType(jps));
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingLocalDateTimeValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        LocalDateTime ldt = LocalDateTime.now(Clock.systemDefaultZone());
    +        
    +        SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, ldt, Types.BIGINT));
    +        assertEquals("Conversion from type " + ldt.getClass().getName() + " to BIGINT not supported", sqle.getMessage());
    +    }
    +    
    +    public void testSettingByteArrayValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        
    +        byte[] buffer = "some data".getBytes(StandardCharsets.UTF_8);
    +        jps.setBytes(1, buffer);
    +        assertEquals(byte[].class, value(jps).getClass());
    +        assertEquals(VARBINARY, jdbcType(jps));
    +        
    +        jps.setObject(1, buffer);
    +        assertEquals(byte[].class, value(jps).getClass());
    +        assertEquals(VARBINARY, jdbcType(jps));
    +        assertTrue(value(jps) instanceof byte[]);
    +        
    +        jps.setObject(1, buffer, Types.VARBINARY);
    +        assertArrayEquals(buffer, (byte[]) value(jps));
    +        assertEquals(VARBINARY, jdbcType(jps));
    +        
    +        SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, buffer, Types.VARCHAR));
    +        assertEquals("Conversion from type byte[] to VARCHAR not supported", sqle.getMessage());
    +        
    +        sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, buffer, Types.DOUBLE));
    +        assertEquals("Conversion from type byte[] to DOUBLE not supported", sqle.getMessage());
    +    }
    +    
    +    public void testThrownExceptionsWhenSettingByteArrayValues() throws SQLException {
    +        JdbcPreparedStatement jps = createJdbcPreparedStatement();
    +        byte[] buffer = "foo".getBytes(StandardCharsets.UTF_8);
    +        
    +        SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, buffer, Types.VARCHAR));
    +        assertEquals("Conversion from type byte[] to VARCHAR not supported", sqle.getMessage());
    +        
    +        sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, buffer, Types.DOUBLE));
    +        assertEquals("Conversion from type byte[] to DOUBLE not supported", sqle.getMessage());
    +    }
    +
    +    private long randomMillisSinceEpoch() {
    +        return randomLongBetween(0, System.currentTimeMillis());
    +    }
    +
    +    private JdbcPreparedStatement createJdbcPreparedStatement() throws SQLException {
    +        return new JdbcPreparedStatement(null, JdbcConfiguration.create("jdbc:es://l:1", null, 0), "?");
    +    }
    +
    +    private JDBCType jdbcType(JdbcPreparedStatement jps) throws SQLException {
    +        return jps.query.getParam(1).type;
    +    }
    +
    +    private Object value(JdbcPreparedStatement jps) throws SQLException {
    +        return jps.query.getParam(1).value;
    +    }
    +    
    +    private Calendar randomCalendar() {
    +        return Calendar.getInstance(randomTimeZone(), Locale.ROOT);
    +    }
    +    
    +    /*
    +     * Converts a date from UTC back into the time zone of the provided Calendar.
    +     * Helps check that the date/time values passed to the set*(..., Calendar) methods
    +     * were correctly converted to UTC by the driver.
    +     */
    +    private long convertFromUTCtoCalendar(Date date, Calendar nonDefaultCal) throws SQLException {
    +        return ZonedDateTime.ofInstant(date.toInstant(), ZoneOffset.UTC)
    +                .withZoneSameLocal(nonDefaultCal.getTimeZone().toZoneId())
    +                .toInstant().toEpochMilli();
    +    }
    +}
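    Aside on the new tests above: they pin down the driver-side coercions that JdbcPreparedStatement.setObject
    performs, both when the JDBC type is inferred from the Java value and when an explicit target SQL type is
    passed. A minimal sketch of how those conversions surface through the plain java.sql API follows; it is
    illustrative only and not part of this change set, the connection URL, index and column names are assumed,
    and it requires the Elasticsearch SQL JDBC driver on the classpath plus a reachable cluster.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Types;

    public class SetObjectConversionSketch {
        public static void main(String[] args) throws SQLException {
            // Hypothetical endpoint; the tests above use the same "jdbc:es://" URL scheme.
            try (Connection con = DriverManager.getConnection("jdbc:es://localhost:9200");
                 PreparedStatement ps = con.prepareStatement(
                         "SELECT author FROM library WHERE page_count > ? AND available = ?")) {

                // Without a target type the driver infers the JDBC type from the Java value:
                // Integer -> INTEGER, Boolean -> BOOLEAN (cf. testSettingIntegerValues / testSettingBooleanValues).
                ps.setObject(1, 250);
                ps.setObject(2, true);

                // An explicit target type requests a driver-side conversion; unsupported combinations
                // (e.g. Boolean -> Types.TIMESTAMP) or out-of-range numerics fail with SQLException,
                // which is exactly what the new tests assert.
                ps.setObject(2, true, Types.BOOLEAN);

                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        System.out.println(rs.getString(1));
                    }
                }
            }
        }
    }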
    diff --git a/x-pack/plugin/sql/sql-action/build.gradle b/x-pack/plugin/sql/sql-action/build.gradle
    new file mode 100644
    index 0000000000000..d8805d2e3db51
    --- /dev/null
    +++ b/x-pack/plugin/sql/sql-action/build.gradle
    @@ -0,0 +1,152 @@
    +
    +/*
    + * This project contains transport-level requests and responses that are shared between the x-pack plugin and the qa tests
    + */
    +
    +import org.elasticsearch.gradle.precommit.PrecommitTasks
    +
    +apply plugin: 'elasticsearch.build'
    +
    +description = 'Request and response objects shared by the cli, jdbc ' +
    +        'and the Elasticsearch plugin'
    +
    +dependencies {
    +    /* We'd like to just depend on xcontent, but there are some bits of
    +     * :server that we rely on. */
    +    compile (project(':server')) {
    +        transitive = false
    +    }
    +    compile (project(':libs:core')) {
    +        transitive = false
    +    }
    +    compile (project(':libs:x-content')) {
    +        transitive = false
    +    }
    +    compile xpackProject('plugin:sql:sql-proto')
    +    compile "org.apache.lucene:lucene-core:${versions.lucene}"
    +    compile 'joda-time:joda-time:2.9.9'
    +    runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
    +    runtime "org.apache.logging.log4j:log4j-api:${versions.log4j}"
    +    runtime "org.apache.logging.log4j:log4j-core:${versions.log4j}"
    +
    +    testCompile "org.elasticsearch.test:framework:${version}"
    +}
    +
    +forbiddenApisMain {
    +    //sql does not depend on server, so only jdk signatures should be checked
    +    signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')]
    +}
    +
    +dependencyLicenses {
    +    mapping from: /elasticsearch-core.*/, to: 'elasticsearch'
    +    mapping from: /jackson-.*/, to: 'jackson'
    +    mapping from: /lucene-.*/, to: 'lucene'
    +    ignoreSha 'elasticsearch'
    +    ignoreSha 'elasticsearch-core'
    +}
    +
    +thirdPartyAudit.excludes = [
    +        'com.fasterxml.jackson.dataformat.yaml.YAMLFactory',
    +        'com.fasterxml.jackson.dataformat.yaml.YAMLMapper',
    +
    +        // from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml)
    +        'com.fasterxml.jackson.databind.ObjectMapper',
    +        'org.fusesource.jansi.Ansi',
    +        'org.fusesource.jansi.AnsiRenderer$Code',
    +
    +        // from log4j
    +        'com.conversantmedia.util.concurrent.DisruptorBlockingQueue',
    +        'com.conversantmedia.util.concurrent.SpinPolicy',
    +        'com.fasterxml.jackson.annotation.JsonInclude$Include',
    +        'com.fasterxml.jackson.databind.DeserializationContext',
    +        'com.fasterxml.jackson.databind.DeserializationFeature',
    +        'com.fasterxml.jackson.databind.JsonMappingException',
    +        'com.fasterxml.jackson.databind.JsonNode',
    +        'com.fasterxml.jackson.databind.Module$SetupContext',
    +        'com.fasterxml.jackson.databind.ObjectReader',
    +        'com.fasterxml.jackson.databind.ObjectWriter',
    +        'com.fasterxml.jackson.databind.SerializerProvider',
    +        'com.fasterxml.jackson.databind.deser.std.StdDeserializer',
    +        'com.fasterxml.jackson.databind.deser.std.StdScalarDeserializer',
    +        'com.fasterxml.jackson.databind.module.SimpleModule',
    +        'com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter',
    +        'com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider',
    +        'com.fasterxml.jackson.databind.ser.std.StdScalarSerializer',
    +        'com.fasterxml.jackson.databind.ser.std.StdSerializer',
    +        'com.fasterxml.jackson.dataformat.xml.JacksonXmlModule',
    +        'com.fasterxml.jackson.dataformat.xml.XmlMapper',
    +        'com.fasterxml.jackson.dataformat.xml.util.DefaultXmlPrettyPrinter',
    +        'com.fasterxml.jackson.databind.node.JsonNodeFactory',
    +        'com.fasterxml.jackson.databind.node.ObjectNode',
    +        'com.lmax.disruptor.BlockingWaitStrategy',
    +        'com.lmax.disruptor.BusySpinWaitStrategy',
    +        'com.lmax.disruptor.EventFactory',
    +        'com.lmax.disruptor.EventTranslator',
    +        'com.lmax.disruptor.EventTranslatorTwoArg',
    +        'com.lmax.disruptor.EventTranslatorVararg',
    +        'com.lmax.disruptor.ExceptionHandler',
    +        'com.lmax.disruptor.LifecycleAware',
    +        'com.lmax.disruptor.RingBuffer',
    +        'com.lmax.disruptor.Sequence',
    +        'com.lmax.disruptor.SequenceReportingEventHandler',
    +        'com.lmax.disruptor.SleepingWaitStrategy',
    +        'com.lmax.disruptor.TimeoutBlockingWaitStrategy',
    +        'com.lmax.disruptor.WaitStrategy',
    +        'com.lmax.disruptor.YieldingWaitStrategy',
    +        'com.lmax.disruptor.dsl.Disruptor',
    +        'com.lmax.disruptor.dsl.ProducerType',
    +        'javax.jms.Connection',
    +        'javax.jms.ConnectionFactory',
    +        'javax.jms.Destination',
    +        'javax.jms.JMSException',
    +        'javax.jms.MapMessage',
    +        'javax.jms.Message',
    +        'javax.jms.MessageConsumer',
    +        'javax.jms.MessageProducer',
    +        'javax.jms.Session',
    +        'javax.mail.Authenticator',
    +        'javax.mail.Message$RecipientType',
    +        'javax.mail.PasswordAuthentication',
    +        'javax.mail.Session',
    +        'javax.mail.Transport',
    +        'javax.mail.internet.InternetAddress',
    +        'javax.mail.internet.InternetHeaders',
    +        'javax.mail.internet.MimeBodyPart',
    +        'javax.mail.internet.MimeMessage',
    +        'javax.mail.internet.MimeMultipart',
    +        'javax.mail.internet.MimeUtility',
    +        'javax.mail.util.ByteArrayDataSource',
    +        'javax.persistence.AttributeConverter',
    +        'javax.persistence.EntityManager',
    +        'javax.persistence.EntityManagerFactory',
    +        'javax.persistence.EntityTransaction',
    +        'javax.persistence.Persistence',
    +        'javax.persistence.PersistenceException',
    +        'org.apache.commons.compress.compressors.CompressorStreamFactory',
    +        'org.apache.commons.compress.utils.IOUtils',
    +        'org.apache.commons.csv.CSVFormat',
    +        'org.apache.commons.csv.QuoteMode',
    +        'org.apache.kafka.clients.producer.Callback',
    +        'org.apache.kafka.clients.producer.KafkaProducer',
    +        'org.apache.kafka.clients.producer.Producer',
    +        'org.apache.kafka.clients.producer.ProducerRecord',
    +        'org.apache.kafka.clients.producer.RecordMetadata',
    +        'org.codehaus.stax2.XMLStreamWriter2',
    +        'org.jctools.queues.MessagePassingQueue$Consumer',
    +        'org.jctools.queues.MpscArrayQueue',
    +        'org.osgi.framework.AdaptPermission',
    +        'org.osgi.framework.AdminPermission',
    +        'org.osgi.framework.Bundle',
    +        'org.osgi.framework.BundleActivator',
    +        'org.osgi.framework.BundleContext',
    +        'org.osgi.framework.BundleEvent',
    +        'org.osgi.framework.BundleReference',
    +        'org.osgi.framework.FrameworkUtil',
    +        'org.osgi.framework.ServiceRegistration',
    +        'org.osgi.framework.SynchronousBundleListener',
    +        'org.osgi.framework.wiring.BundleWire',
    +        'org.osgi.framework.wiring.BundleWiring',
    +        'org.zeromq.ZMQ$Context',
    +        'org.zeromq.ZMQ$Socket',
    +        'org.zeromq.ZMQ'
    +]
    diff --git a/x-pack/plugin/sql/sql-shared-client/licenses/jackson-LICENSE b/x-pack/plugin/sql/sql-action/licenses/jackson-LICENSE
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/licenses/jackson-LICENSE
    rename to x-pack/plugin/sql/sql-action/licenses/jackson-LICENSE
    diff --git a/x-pack/plugin/sql/sql-shared-client/licenses/jackson-NOTICE b/x-pack/plugin/sql/sql-action/licenses/jackson-NOTICE
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/licenses/jackson-NOTICE
    rename to x-pack/plugin/sql/sql-action/licenses/jackson-NOTICE
    diff --git a/x-pack/plugin/sql/sql-shared-client/licenses/jackson-core-2.8.10.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/jackson-core-2.8.10.jar.sha1
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/licenses/jackson-core-2.8.10.jar.sha1
    rename to x-pack/plugin/sql/sql-action/licenses/jackson-core-2.8.10.jar.sha1
    diff --git a/x-pack/plugin/sql/sql-shared-proto/licenses/joda-time-2.9.9.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/joda-time-2.9.9.jar.sha1
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/licenses/joda-time-2.9.9.jar.sha1
    rename to x-pack/plugin/sql/sql-action/licenses/joda-time-2.9.9.jar.sha1
    diff --git a/x-pack/plugin/sql/sql-shared-proto/licenses/joda-time-LICENSE.txt b/x-pack/plugin/sql/sql-action/licenses/joda-time-LICENSE.txt
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/licenses/joda-time-LICENSE.txt
    rename to x-pack/plugin/sql/sql-action/licenses/joda-time-LICENSE.txt
    diff --git a/x-pack/plugin/sql/sql-shared-proto/licenses/joda-time-NOTICE.txt b/x-pack/plugin/sql/sql-action/licenses/joda-time-NOTICE.txt
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/licenses/joda-time-NOTICE.txt
    rename to x-pack/plugin/sql/sql-action/licenses/joda-time-NOTICE.txt
    diff --git a/x-pack/plugin/sql/sql-proto/licenses/log4j-api-2.9.1.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/log4j-api-2.9.1.jar.sha1
    similarity index 100%
    rename from x-pack/plugin/sql/sql-proto/licenses/log4j-api-2.9.1.jar.sha1
    rename to x-pack/plugin/sql/sql-action/licenses/log4j-api-2.9.1.jar.sha1
    diff --git a/x-pack/plugin/sql/sql-proto/licenses/log4j-api-LICENSE.txt b/x-pack/plugin/sql/sql-action/licenses/log4j-api-LICENSE.txt
    similarity index 100%
    rename from x-pack/plugin/sql/sql-proto/licenses/log4j-api-LICENSE.txt
    rename to x-pack/plugin/sql/sql-action/licenses/log4j-api-LICENSE.txt
    diff --git a/x-pack/plugin/sql/sql-proto/licenses/log4j-api-NOTICE.txt b/x-pack/plugin/sql/sql-action/licenses/log4j-api-NOTICE.txt
    similarity index 100%
    rename from x-pack/plugin/sql/sql-proto/licenses/log4j-api-NOTICE.txt
    rename to x-pack/plugin/sql/sql-action/licenses/log4j-api-NOTICE.txt
    diff --git a/x-pack/plugin/sql/sql-proto/licenses/log4j-core-2.9.1.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/log4j-core-2.9.1.jar.sha1
    similarity index 100%
    rename from x-pack/plugin/sql/sql-proto/licenses/log4j-core-2.9.1.jar.sha1
    rename to x-pack/plugin/sql/sql-action/licenses/log4j-core-2.9.1.jar.sha1
    diff --git a/x-pack/plugin/sql/sql-proto/licenses/log4j-core-LICENSE.txt b/x-pack/plugin/sql/sql-action/licenses/log4j-core-LICENSE.txt
    similarity index 100%
    rename from x-pack/plugin/sql/sql-proto/licenses/log4j-core-LICENSE.txt
    rename to x-pack/plugin/sql/sql-action/licenses/log4j-core-LICENSE.txt
    diff --git a/x-pack/plugin/sql/sql-proto/licenses/log4j-core-NOTICE.txt b/x-pack/plugin/sql/sql-action/licenses/log4j-core-NOTICE.txt
    similarity index 100%
    rename from x-pack/plugin/sql/sql-proto/licenses/log4j-core-NOTICE.txt
    rename to x-pack/plugin/sql/sql-action/licenses/log4j-core-NOTICE.txt
    diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-LICENSE.txt b/x-pack/plugin/sql/sql-action/licenses/lucene-LICENSE.txt
    similarity index 100%
    rename from x-pack/plugin/sql/sql-proto/licenses/lucene-LICENSE.txt
    rename to x-pack/plugin/sql/sql-action/licenses/lucene-LICENSE.txt
    diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-NOTICE.txt b/x-pack/plugin/sql/sql-action/licenses/lucene-NOTICE.txt
    similarity index 100%
    rename from x-pack/plugin/sql/sql-proto/licenses/lucene-NOTICE.txt
    rename to x-pack/plugin/sql/sql-action/licenses/lucene-NOTICE.txt
    diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.4.0.jar.sha1
    similarity index 100%
    rename from x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1
    rename to x-pack/plugin/sql/sql-action/licenses/lucene-core-7.4.0.jar.sha1
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlQueryRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java
    similarity index 99%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlQueryRequest.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java
    index dcb210ca68d9b..5ca956404be66 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlQueryRequest.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.CompositeIndicesRequest;
     import org.elasticsearch.common.Nullable;
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlRequest.java
    similarity index 98%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlRequest.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlRequest.java
    index 2cb23f796d609..ba513fed7da24 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/AbstractSqlRequest.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlRequest.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.ActionRequest;
     import org.elasticsearch.action.ActionRequestValidationException;
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatter.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/CliFormatter.java
    similarity index 99%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatter.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/CliFormatter.java
    index 359652fa4f203..c773e75aa18be 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatter.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/CliFormatter.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java
    similarity index 94%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java
    index 0908af76bebee..ee824fc04e9f5 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorAction.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.Action;
     
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequest.java
    similarity index 98%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequest.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequest.java
    index 45dda28588726..7a1f72dd74a80 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequest.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequest.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.ActionRequestValidationException;
     import org.elasticsearch.common.ParseField;
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequestBuilder.java
    similarity index 94%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequestBuilder.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequestBuilder.java
    index b7a1f383a2f8b..40ce4f8dcf355 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequestBuilder.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequestBuilder.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.ActionRequestBuilder;
     import org.elasticsearch.client.ElasticsearchClient;
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorResponse.java
    similarity index 98%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponse.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorResponse.java
    index 3bb3df9a47ffd..ba9e4193d9907 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponse.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorResponse.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.ActionResponse;
     import org.elasticsearch.common.io.stream.StreamInput;
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java
    similarity index 93%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java
    index 5b9a5b1c3ef47..f25eef31d3dc3 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryAction.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.Action;
     
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java
    similarity index 98%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequest.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java
    index 16c3cbf9c39c7..a7166f56790c0 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequest.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.ActionRequestValidationException;
     import org.elasticsearch.common.ParseField;
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java
    similarity index 98%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestBuilder.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java
    index 812b72473ad14..b1c6c310f7ade 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestBuilder.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.ActionRequestBuilder;
     import org.elasticsearch.client.ElasticsearchClient;
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java
    similarity index 99%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponse.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java
    index 118ba81f82df0..1dc356f9fba5f 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponse.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.ActionResponse;
     import org.elasticsearch.common.Nullable;
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java
    similarity index 94%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java
    index 978a11fbbb645..2431ecc1edf8c 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateAction.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.Action;
     
    @@ -23,4 +23,4 @@ private SqlTranslateAction() {
         public SqlTranslateResponse newResponse() {
             return new SqlTranslateResponse();
         }
    -}
    \ No newline at end of file
    +}
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequest.java
    similarity index 93%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequest.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequest.java
    index 103bfe5fddd69..73187a02856e0 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequest.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequest.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.ActionRequestValidationException;
     import org.elasticsearch.common.Strings;
    @@ -15,6 +15,7 @@
     import org.elasticsearch.index.query.QueryBuilder;
     import org.elasticsearch.xpack.sql.proto.Mode;
     import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
    +import org.elasticsearch.xpack.sql.proto.SqlQueryRequest;
     
     import java.io.IOException;
     import java.util.List;
    @@ -63,7 +64,7 @@ public static SqlTranslateRequest fromXContent(XContentParser parser, Mode mode)
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             // This is needed just to test parsing of SqlTranslateRequest, so we can reuse SqlQuerySerialization
    -        return new org.elasticsearch.xpack.sql.proto.SqlQueryRequest(mode(), query(), params(), timeZone(), fetchSize(),
    +        return new SqlQueryRequest(mode(), query(), params(), timeZone(), fetchSize(),
                 requestTimeout(), pageTimeout(), filter(), null).toXContent(builder, params);
     
         }
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java
    similarity index 97%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestBuilder.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java
    index 9b9b00ebde9f1..22af91be67ddc 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestBuilder.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.ActionRequestBuilder;
     import org.elasticsearch.client.ElasticsearchClient;
    diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java
    similarity index 97%
    rename from x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateResponse.java
    rename to x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java
    index a19f8d1fc3cba..e2efd4b46b674 100644
    --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateResponse.java
    +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponse.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.action.ActionResponse;
     import org.elasticsearch.common.io.stream.StreamInput;
    diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequestTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequestTests.java
    similarity index 97%
    rename from x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequestTests.java
    rename to x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequestTests.java
    index e479ae8b4f1ea..e9c7043519b4c 100644
    --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorRequestTests.java
    +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequestTests.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.common.io.stream.Writeable;
     import org.elasticsearch.common.xcontent.XContentParser;
    diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponseTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorResponseTests.java
    similarity index 96%
    rename from x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponseTests.java
    rename to x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorResponseTests.java
    index 94964428bb4f3..9e9f200abac52 100644
    --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorResponseTests.java
    +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorResponseTests.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.common.xcontent.XContentParser;
     import org.elasticsearch.test.AbstractStreamableXContentTestCase;
    diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestTests.java
    similarity index 96%
    rename from x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestTests.java
    rename to x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestTests.java
    index 0e4a183ab1626..84f361cabc7fc 100644
    --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryRequestTests.java
    +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestTests.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.common.io.stream.Writeable;
    @@ -25,8 +25,8 @@
     import java.util.function.Consumer;
     import java.util.function.Supplier;
     
    -import static org.elasticsearch.xpack.sql.plugin.SqlTestUtils.randomFilter;
    -import static org.elasticsearch.xpack.sql.plugin.SqlTestUtils.randomFilterOrNull;
    +import static org.elasticsearch.xpack.sql.action.SqlTestUtils.randomFilter;
    +import static org.elasticsearch.xpack.sql.action.SqlTestUtils.randomFilterOrNull;
     
     public class SqlQueryRequestTests extends AbstractSerializingTestCase {
     
    diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponseTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java
    similarity index 99%
    rename from x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponseTests.java
    rename to x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java
    index bc5e5ae2a0180..0957b4c5202c1 100644
    --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlQueryResponseTests.java
    +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryResponseTests.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.common.Strings;
     import org.elasticsearch.common.bytes.BytesReference;
    diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTestUtils.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTestUtils.java
    similarity index 96%
    rename from x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTestUtils.java
    rename to x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTestUtils.java
    index 05a164c1c94b7..0aee6aade9623 100644
    --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTestUtils.java
    +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTestUtils.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import com.carrotsearch.randomizedtesting.generators.RandomStrings;
     import org.elasticsearch.index.query.QueryBuilder;
    diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java
    similarity index 95%
    rename from x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestTests.java
    rename to x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java
    index 2eb3d71bbf410..cd022746baeef 100644
    --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateRequestTests.java
    +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.common.io.stream.Writeable;
    @@ -21,8 +21,8 @@
     import java.util.Collections;
     import java.util.function.Consumer;
     
    -import static org.elasticsearch.xpack.sql.plugin.SqlTestUtils.randomFilter;
    -import static org.elasticsearch.xpack.sql.plugin.SqlTestUtils.randomFilterOrNull;
    +import static org.elasticsearch.xpack.sql.action.SqlTestUtils.randomFilter;
    +import static org.elasticsearch.xpack.sql.action.SqlTestUtils.randomFilterOrNull;
     
     public class SqlTranslateRequestTests extends AbstractSerializingTestCase {
     
    diff --git a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateResponseTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponseTests.java
    similarity index 94%
    rename from x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateResponseTests.java
    rename to x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponseTests.java
    index 76f73fada0663..647bb90952073 100644
    --- a/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateResponseTests.java
    +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateResponseTests.java
    @@ -3,11 +3,12 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.search.builder.SearchSourceBuilder;
     import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
     import org.elasticsearch.test.AbstractStreamableTestCase;
    +import org.elasticsearch.xpack.sql.action.SqlTranslateResponse;
     
     import java.io.IOException;
     
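For consumers outside this patch, the moves above boil down to a package change: the request, response and action classes that used to live under org.elasticsearch.xpack.sql.plugin are now published from org.elasticsearch.xpack.sql.action. A minimal sketch of code written against the new coordinates follows; the holder class is hypothetical and only the imported types come from the renamed files.

    // Before this change the same classes were imported from org.elasticsearch.xpack.sql.plugin.
    import org.elasticsearch.xpack.sql.action.SqlTranslateAction;
    import org.elasticsearch.xpack.sql.action.SqlTranslateResponse;

    // Hypothetical holder, used only to show that the relocated classes resolve;
    // it is not part of the change itself.
    public final class SqlActionPackageCheck {
        static final String TRANSLATE_ACTION = SqlTranslateAction.class.getName();
        static final String TRANSLATE_RESPONSE = SqlTranslateResponse.class.getName();

        private SqlActionPackageCheck() {}
    }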
    diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle
    index 06eb24c743ad8..b90b07abad3d1 100644
    --- a/x-pack/plugin/sql/sql-cli/build.gradle
    +++ b/x-pack/plugin/sql/sql-cli/build.gradle
    @@ -16,8 +16,8 @@ description = 'Command line interface to Elasticsearch that speaks SQL'
     
     dependencies {
         compile "org.jline:jline:3.6.0"
    -    compile xpackProject('plugin:sql:sql-shared-client')
    -    compile xpackProject('plugin:sql:sql-proto')
    +    compile xpackProject('plugin:sql:sql-client')
    +    compile xpackProject('plugin:sql:sql-action')
         compile "org.elasticsearch:elasticsearch-cli:${version}"
     
         runtime "org.fusesource.jansi:jansi:1.16"
    @@ -31,13 +31,13 @@ dependencyLicenses {
         mapping from: /elasticsearch-core.*/, to: 'elasticsearch'
         mapping from: /jackson-.*/, to: 'jackson'
         mapping from: /lucene-.*/, to: 'lucene'
    -    mapping from: /sql-proto.*/, to: 'elasticsearch'
    -    mapping from: /sql-shared-client.*/, to: 'elasticsearch'
    +    mapping from: /sql-action.*/, to: 'elasticsearch'
    +    mapping from: /sql-client.*/, to: 'elasticsearch'
         ignoreSha 'elasticsearch-cli'
         ignoreSha 'elasticsearch-core'
         ignoreSha 'elasticsearch'
    -    ignoreSha 'sql-proto'
    -    ignoreSha 'sql-shared-client'
    +    ignoreSha 'sql-action'
    +    ignoreSha 'sql-client'
     }
     
     /*
    diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java
    index d1f59c97e5ab1..357a4bcb5a770 100644
    --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java
    +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java
    @@ -21,9 +21,9 @@
     import org.elasticsearch.xpack.sql.cli.command.ServerInfoCliCommand;
     import org.elasticsearch.xpack.sql.cli.command.ServerQueryCliCommand;
     import org.elasticsearch.xpack.sql.client.HttpClient;
    -import org.elasticsearch.xpack.sql.client.shared.ClientException;
    -import org.elasticsearch.xpack.sql.client.shared.ConnectionConfiguration;
    -import org.elasticsearch.xpack.sql.client.shared.Version;
    +import org.elasticsearch.xpack.sql.client.ClientException;
    +import org.elasticsearch.xpack.sql.client.ConnectionConfiguration;
    +import org.elasticsearch.xpack.sql.client.Version;
     import org.jline.terminal.TerminalBuilder;
     import java.io.IOException;
     import java.net.ConnectException;
    diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilder.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilder.java
    index dbd0e230043ce..9033133a0efc2 100644
    --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilder.java
    +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilder.java
    @@ -8,7 +8,7 @@
     import org.elasticsearch.cli.ExitCodes;
     import org.elasticsearch.cli.SuppressForbidden;
     import org.elasticsearch.cli.UserException;
    -import org.elasticsearch.xpack.sql.client.shared.ConnectionConfiguration;
    +import org.elasticsearch.xpack.sql.client.ConnectionConfiguration;
     
     import java.net.URI;
     import java.nio.file.Files;
    @@ -16,8 +16,8 @@
     import java.nio.file.Paths;
     import java.util.Properties;
     
    -import static org.elasticsearch.xpack.sql.client.shared.UriUtils.parseURI;
    -import static org.elasticsearch.xpack.sql.client.shared.UriUtils.removeQuery;
    +import static org.elasticsearch.xpack.sql.client.UriUtils.parseURI;
    +import static org.elasticsearch.xpack.sql.client.UriUtils.removeQuery;
     
     /**
      * Connection Builder. Can interactively ask users for the password if it is not provided
    diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java
    index 82a2a8817f41d..f5b91704aeae9 100644
    --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java
    +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java
    @@ -6,8 +6,8 @@
     package org.elasticsearch.xpack.sql.cli.command;
     
     import org.elasticsearch.xpack.sql.client.HttpClient;
    -import org.elasticsearch.xpack.sql.client.shared.ClientException;
    -import org.elasticsearch.xpack.sql.client.shared.Version;
    +import org.elasticsearch.xpack.sql.client.ClientException;
    +import org.elasticsearch.xpack.sql.client.Version;
     import org.elasticsearch.xpack.sql.proto.MainResponse;
     import org.elasticsearch.xpack.sql.proto.Protocol;
     
    diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java
    index aa8bc499cd29e..1eb982c814c64 100644
    --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java
    +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java
    @@ -7,8 +7,8 @@
     
     import org.elasticsearch.xpack.sql.cli.CliTerminal;
     import org.elasticsearch.xpack.sql.client.HttpClient;
    -import org.elasticsearch.xpack.sql.client.shared.JreHttpUrlConnection;
    -import org.elasticsearch.xpack.sql.plugin.CliFormatter;
    +import org.elasticsearch.xpack.sql.client.JreHttpUrlConnection;
    +import org.elasticsearch.xpack.sql.action.CliFormatter;
     import org.elasticsearch.xpack.sql.proto.SqlQueryResponse;
     
     import java.sql.SQLException;
    diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java
    index 8b0138865ce20..295ebe1fb0792 100644
    --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java
    +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/CliSessionTests.java
    @@ -10,8 +10,8 @@
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.xpack.sql.cli.command.CliSession;
     import org.elasticsearch.xpack.sql.client.HttpClient;
    -import org.elasticsearch.xpack.sql.client.shared.ClientException;
    -import org.elasticsearch.xpack.sql.client.shared.Version;
    +import org.elasticsearch.xpack.sql.client.ClientException;
    +import org.elasticsearch.xpack.sql.client.Version;
     import org.elasticsearch.xpack.sql.proto.MainResponse;
     
     import java.sql.SQLException;
    diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilderTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilderTests.java
    index 69b77931ff0d8..d825b99230063 100644
    --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilderTests.java
    +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilderTests.java
    @@ -7,8 +7,8 @@
     
     import org.elasticsearch.cli.UserException;
     import org.elasticsearch.test.ESTestCase;
    -import org.elasticsearch.xpack.sql.client.shared.ConnectionConfiguration;
    -import org.elasticsearch.xpack.sql.client.shared.SslConfig;
    +import org.elasticsearch.xpack.sql.client.ConnectionConfiguration;
    +import org.elasticsearch.xpack.sql.client.SslConfig;
     import java.net.URI;
     import java.nio.file.Path;
     import java.util.Properties;
    diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/VersionTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/VersionTests.java
    index 0f1effc446389..5be8bdd4df025 100644
    --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/VersionTests.java
    +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/VersionTests.java
    @@ -6,7 +6,7 @@
     package org.elasticsearch.xpack.sql.cli;
     
     import org.elasticsearch.test.ESTestCase;
    -import org.elasticsearch.xpack.sql.client.shared.Version;
    +import org.elasticsearch.xpack.sql.client.Version;
     
     public class VersionTests extends ESTestCase {
         public void testVersionIsCurrent() {
    diff --git a/x-pack/plugin/sql/sql-shared-client/build.gradle b/x-pack/plugin/sql/sql-client/build.gradle
    similarity index 80%
    rename from x-pack/plugin/sql/sql-shared-client/build.gradle
    rename to x-pack/plugin/sql/sql-client/build.gradle
    index f7f5efec7af3f..fbc411e44596d 100644
    --- a/x-pack/plugin/sql/sql-shared-client/build.gradle
    +++ b/x-pack/plugin/sql/sql-client/build.gradle
    @@ -1,8 +1,6 @@
     
     /*
    - * This project is named sql-shared-client because it is in the
    - * "org.elasticsearch.plugin" group and it'd be super confusing for it to just
    - * be called "shared-client" there.
    + * The minimal-dependency, REST-based SQL client used by the CLI and the JDBC driver
      */
     
     apply plugin: 'elasticsearch.build'
    @@ -10,18 +8,18 @@ apply plugin: 'elasticsearch.build'
     description = 'Code shared between jdbc and cli'
     
     dependencies {
    -    compile xpackProject('plugin:sql:sql-shared-proto')
    +    compile xpackProject('plugin:sql:sql-proto')
         compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
         testCompile "org.elasticsearch.test:framework:${version}"
     }
     
     dependencyLicenses {
         mapping from: /jackson-.*/, to: 'jackson'
    -    mapping from: /sql-shared-proto.*/, to: 'elasticsearch'
    +    mapping from: /sql-proto.*/, to: 'elasticsearch'
         mapping from: /elasticsearch-cli.*/, to: 'elasticsearch'
         mapping from: /elasticsearch-core.*/, to: 'elasticsearch'
         mapping from: /lucene-.*/, to: 'lucene'
    -    ignoreSha 'sql-proto'
    +    ignoreSha 'sql-action'
         ignoreSha 'elasticsearch'
         ignoreSha 'elasticsearch-core'
     }
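The project rename from sql-shared-client to sql-client is mirrored in the sources that follow: every class under org.elasticsearch.xpack.sql.client.shared drops the ".shared" segment. A minimal sketch of a caller after the move, assuming ClientException keeps a plain String-message constructor (the class is only described in this patch as a general-purpose client-side exception); the wrapper class itself is hypothetical.

    // Before: import org.elasticsearch.xpack.sql.client.shared.ClientException;
    //         import org.elasticsearch.xpack.sql.client.shared.Version;
    import org.elasticsearch.xpack.sql.client.ClientException;
    import org.elasticsearch.xpack.sql.client.Version;

    // Hypothetical helper, for illustration only.
    public final class ClientPackageCheck {
        // Resolves to "org.elasticsearch.xpack.sql.client.Version" after the move.
        static String versionClass() {
            return Version.class.getName();
        }

        // Assumes a plain String-message constructor on ClientException, as noted above.
        static ClientException missingVersion() {
            return new ClientException("server did not report a version");
        }

        private ClientPackageCheck() {}
    }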
    diff --git a/x-pack/plugin/sql/sql-shared-proto/licenses/jackson-LICENSE b/x-pack/plugin/sql/sql-client/licenses/jackson-LICENSE
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/licenses/jackson-LICENSE
    rename to x-pack/plugin/sql/sql-client/licenses/jackson-LICENSE
    diff --git a/x-pack/plugin/sql/sql-shared-proto/licenses/jackson-NOTICE b/x-pack/plugin/sql/sql-client/licenses/jackson-NOTICE
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/licenses/jackson-NOTICE
    rename to x-pack/plugin/sql/sql-client/licenses/jackson-NOTICE
    diff --git a/x-pack/plugin/sql/sql-shared-proto/licenses/jackson-core-2.8.10.jar.sha1 b/x-pack/plugin/sql/sql-client/licenses/jackson-core-2.8.10.jar.sha1
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/licenses/jackson-core-2.8.10.jar.sha1
    rename to x-pack/plugin/sql/sql-client/licenses/jackson-core-2.8.10.jar.sha1
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/Bytes.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/Bytes.java
    similarity index 93%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/Bytes.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/Bytes.java
    index 0e0a3af56339f..4d5e78d91dbd8 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/Bytes.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/Bytes.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.nio.charset.StandardCharsets;
     import java.util.Arrays;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/CheckedBiFunction.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/CheckedBiFunction.java
    similarity index 91%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/CheckedBiFunction.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/CheckedBiFunction.java
    index ba16da3bfd313..afe1ad14eed97 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/CheckedBiFunction.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/CheckedBiFunction.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.util.function.BiFunction;
     
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/CheckedConsumer.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/CheckedConsumer.java
    similarity index 91%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/CheckedConsumer.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/CheckedConsumer.java
    index 62258eebefaaa..12aa32addaa90 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/CheckedConsumer.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/CheckedConsumer.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.util.function.Consumer;
     
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/CheckedFunction.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/CheckedFunction.java
    similarity index 91%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/CheckedFunction.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/CheckedFunction.java
    index 67e174ffd7075..35a0a22f0598d 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/CheckedFunction.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/CheckedFunction.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.util.function.Function;
     
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ClientException.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ClientException.java
    similarity index 92%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ClientException.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ClientException.java
    index 5eb18b86091c1..996bee3bed405 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ClientException.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ClientException.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     /**
      * A general-purpose exception to be used on the client-side code. Does not support var-args formatting.
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ConnectionConfiguration.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java
    similarity index 99%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ConnectionConfiguration.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java
    index aca262e172647..0de71d27471e7 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ConnectionConfiguration.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.net.URI;
     import java.net.URISyntaxException;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java
    similarity index 95%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java
    index 27e1870904f54..c84bc7b577a04 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java
    @@ -14,11 +14,7 @@
     import org.elasticsearch.common.xcontent.XContentParser;
     import org.elasticsearch.common.xcontent.XContentType;
     import org.elasticsearch.core.internal.io.Streams;
    -import org.elasticsearch.xpack.sql.client.shared.CheckedFunction;
    -import org.elasticsearch.xpack.sql.client.shared.ClientException;
    -import org.elasticsearch.xpack.sql.client.shared.ConnectionConfiguration;
    -import org.elasticsearch.xpack.sql.client.shared.JreHttpUrlConnection;
    -import org.elasticsearch.xpack.sql.client.shared.JreHttpUrlConnection.ResponseOrException;
    +import org.elasticsearch.xpack.sql.client.JreHttpUrlConnection.ResponseOrException;
     import org.elasticsearch.xpack.sql.proto.AbstractSqlRequest;
     import org.elasticsearch.xpack.sql.proto.MainResponse;
     import org.elasticsearch.xpack.sql.proto.Mode;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/JreHttpUrlConnection.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java
    similarity index 99%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/JreHttpUrlConnection.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java
    index 2f6289ee39507..0dca4a88f0592 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/JreHttpUrlConnection.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.io.BufferedInputStream;
     import java.io.Closeable;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ObjectUtils.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ObjectUtils.java
    similarity index 94%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ObjectUtils.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ObjectUtils.java
    index df924cdb37513..3c4fa7fcaad9b 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ObjectUtils.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ObjectUtils.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.util.Arrays;
     import java.util.Map;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ProxyConfig.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ProxyConfig.java
    similarity index 97%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ProxyConfig.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ProxyConfig.java
    index ca311e5292d26..4ec33f4fbe83d 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/ProxyConfig.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ProxyConfig.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.net.InetSocketAddress;
     import java.net.Proxy;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/RemoteFailure.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/RemoteFailure.java
    similarity index 99%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/RemoteFailure.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/RemoteFailure.java
    index f9eccb4f157ef..61e62c390ec11 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/RemoteFailure.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/RemoteFailure.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import com.fasterxml.jackson.core.JsonFactory;
     import com.fasterxml.jackson.core.JsonGenerator;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/SslConfig.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java
    similarity index 99%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/SslConfig.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java
    index 35a1ebe3b96c6..fecfb44492c28 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/SslConfig.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.io.IOException;
     import java.io.InputStream;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/StringUtils.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/StringUtils.java
    similarity index 99%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/StringUtils.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/StringUtils.java
    index 192c217be513c..e4e5bf4d98517 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/StringUtils.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/StringUtils.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.nio.charset.StandardCharsets;
     import java.util.ArrayList;
    @@ -305,4 +305,4 @@ public static String asHexString(byte[] content, int offset, int length) {
             return buf.toString();
         }
     
    -}
    \ No newline at end of file
    +}
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/SuppressForbidden.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SuppressForbidden.java
    similarity index 93%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/SuppressForbidden.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SuppressForbidden.java
    index 52b864edff4bc..fbbf13782a97d 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/SuppressForbidden.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SuppressForbidden.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.lang.annotation.ElementType;
     import java.lang.annotation.Retention;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/UriUtils.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/UriUtils.java
    similarity index 98%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/UriUtils.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/UriUtils.java
    index f8c2e73e6a0c0..26113010c61da 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/UriUtils.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/UriUtils.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.net.URI;
     import java.net.URISyntaxException;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/Version.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/Version.java
    similarity index 98%
    rename from x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/Version.java
    rename to x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/Version.java
    index bfa9e0a3cb492..d7327af6c4e48 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/main/java/org/elasticsearch/xpack/sql/client/shared/Version.java
    +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/Version.java
    @@ -3,7 +3,7 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import java.io.IOException;
     import java.net.URL;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/RemoteFailureTests.java b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/RemoteFailureTests.java
    similarity index 99%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/RemoteFailureTests.java
    rename to x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/RemoteFailureTests.java
    index ab0bc40f3ba72..ee3a859b548da 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/RemoteFailureTests.java
    +++ b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/RemoteFailureTests.java
    @@ -3,10 +3,11 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import org.elasticsearch.common.bytes.BytesArray;
     import org.elasticsearch.test.ESTestCase;
    +import org.elasticsearch.xpack.sql.client.RemoteFailure;
     
     import java.io.IOException;
     import java.io.InputStream;
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/StringUtilsTests.java b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/StringUtilsTests.java
    similarity index 81%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/StringUtilsTests.java
    rename to x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/StringUtilsTests.java
    index b758d361ab978..34f1f20056a05 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/StringUtilsTests.java
    +++ b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/StringUtilsTests.java
    @@ -3,11 +3,11 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import org.elasticsearch.test.ESTestCase;
     
    -import static org.elasticsearch.xpack.sql.client.shared.StringUtils.nullAsEmpty;
    +import static org.elasticsearch.xpack.sql.client.StringUtils.nullAsEmpty;
     
     public class StringUtilsTests extends ESTestCase {
         public void testNullAsEmpty() {
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/UriUtilsTests.java b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/UriUtilsTests.java
    similarity index 94%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/UriUtilsTests.java
    rename to x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/UriUtilsTests.java
    index f75b20d0f0d3f..0b7f6c47b0df0 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/UriUtilsTests.java
    +++ b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/UriUtilsTests.java
    @@ -3,14 +3,14 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import org.elasticsearch.test.ESTestCase;
     
     import java.net.URI;
     
    -import static org.elasticsearch.xpack.sql.client.shared.UriUtils.parseURI;
    -import static org.elasticsearch.xpack.sql.client.shared.UriUtils.removeQuery;
    +import static org.elasticsearch.xpack.sql.client.UriUtils.parseURI;
    +import static org.elasticsearch.xpack.sql.client.UriUtils.removeQuery;
     
     public class UriUtilsTests extends ESTestCase {
     
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/VersionTests.java b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/VersionTests.java
    similarity index 94%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/VersionTests.java
    rename to x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/VersionTests.java
    index 9eabed07931a0..7ed772e352531 100644
    --- a/x-pack/plugin/sql/sql-shared-client/src/test/java/org/elasticsearch/xpack/sql/client/shared/VersionTests.java
    +++ b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/VersionTests.java
    @@ -3,9 +3,10 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.client.shared;
    +package org.elasticsearch.xpack.sql.client;
     
     import org.elasticsearch.test.ESTestCase;
    +import org.elasticsearch.xpack.sql.client.Version;
     
     public class VersionTests extends ESTestCase {
         public void test70Version() {
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/basic.json b/x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/basic.json
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/basic.json
    rename to x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/basic.json
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/bogus_error.json b/x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/bogus_error.json
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/bogus_error.json
    rename to x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/bogus_error.json
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/invalid_json.txt b/x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/invalid_json.txt
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/invalid_json.txt
    rename to x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/invalid_json.txt
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/missing_auth.json b/x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/missing_auth.json
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/missing_auth.json
    rename to x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/missing_auth.json
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/nested.json b/x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/nested.json
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/nested.json
    rename to x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/nested.json
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/no_error.json b/x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/no_error.json
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/no_error.json
    rename to x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/no_error.json
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/no_stack.json b/x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/no_stack.json
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/no_stack.json
    rename to x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/no_stack.json
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/no_type.json b/x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/no_type.json
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/remote_failure/no_type.json
    rename to x-pack/plugin/sql/sql-client/src/test/resources/remote_failure/no_type.json
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/ssl/client.keystore b/x-pack/plugin/sql/sql-client/src/test/resources/ssl/client.keystore
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/ssl/client.keystore
    rename to x-pack/plugin/sql/sql-client/src/test/resources/ssl/client.keystore
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/ssl/readme.txt b/x-pack/plugin/sql/sql-client/src/test/resources/ssl/readme.txt
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/ssl/readme.txt
    rename to x-pack/plugin/sql/sql-client/src/test/resources/ssl/readme.txt
    diff --git a/x-pack/plugin/sql/sql-shared-client/src/test/resources/ssl/server.keystore b/x-pack/plugin/sql/sql-client/src/test/resources/ssl/server.keystore
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-client/src/test/resources/ssl/server.keystore
    rename to x-pack/plugin/sql/sql-client/src/test/resources/ssl/server.keystore
    diff --git a/x-pack/plugin/sql/sql-proto/build.gradle b/x-pack/plugin/sql/sql-proto/build.gradle
    index b1d8497f9b8df..5a1439f4360bc 100644
    --- a/x-pack/plugin/sql/sql-proto/build.gradle
    +++ b/x-pack/plugin/sql/sql-proto/build.gradle
    @@ -1,8 +1,6 @@
     
     /*
    - * This project is named sql-proto because it is in the
    - * "org.elasticsearch.plugin" group and it'd be super confusing for it to just
    - * be called "proto" there.
    + * This project contains the XContent protocol classes shared between the server and the HTTP client
      */
     
     import org.elasticsearch.gradle.precommit.PrecommitTasks
    @@ -13,23 +11,14 @@ description = 'Request and response objects shared by the cli, jdbc ' +
             'and the Elasticsearch plugin'
     
     dependencies {
    -    /* We'd like to just depend on xcontent but there are some bits of
    -     * :server that we rely on.... */
    -    compile (project(':server')) {
    -        transitive = false
    -    }
         compile (project(':libs:core')) {
             transitive = false
         }
         compile (project(':libs:x-content')) {
             transitive = false
         }
    -    compile xpackProject('plugin:sql:sql-shared-proto')
    -    compile "org.apache.lucene:lucene-core:${versions.lucene}"
         compile 'joda-time:joda-time:2.9.9'
         runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
    -    runtime "org.apache.logging.log4j:log4j-api:${versions.log4j}"
    -    runtime "org.apache.logging.log4j:log4j-core:${versions.log4j}"
     
         testCompile "org.elasticsearch.test:framework:${version}"
     }
    @@ -42,113 +31,5 @@ forbiddenApisMain {
     dependencyLicenses {
         mapping from: /elasticsearch-core.*/, to: 'elasticsearch'
         mapping from: /jackson-.*/, to: 'jackson'
    -    mapping from: /lucene-.*/, to: 'lucene'
    -    ignoreSha 'elasticsearch'
         ignoreSha 'elasticsearch-core'
     }
    -
    -thirdPartyAudit.excludes = [
    -        'com.fasterxml.jackson.dataformat.yaml.YAMLFactory',
    -        'com.fasterxml.jackson.dataformat.yaml.YAMLMapper',
    -
    -        // from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml)
    -        'com.fasterxml.jackson.databind.ObjectMapper',
    -        'org.fusesource.jansi.Ansi',
    -        'org.fusesource.jansi.AnsiRenderer$Code',
    -
    -        // from log4j
    -        'com.conversantmedia.util.concurrent.DisruptorBlockingQueue',
    -        'com.conversantmedia.util.concurrent.SpinPolicy',
    -        'com.fasterxml.jackson.annotation.JsonInclude$Include',
    -        'com.fasterxml.jackson.databind.DeserializationContext',
    -        'com.fasterxml.jackson.databind.DeserializationFeature',
    -        'com.fasterxml.jackson.databind.JsonMappingException',
    -        'com.fasterxml.jackson.databind.JsonNode',
    -        'com.fasterxml.jackson.databind.Module$SetupContext',
    -        'com.fasterxml.jackson.databind.ObjectReader',
    -        'com.fasterxml.jackson.databind.ObjectWriter',
    -        'com.fasterxml.jackson.databind.SerializerProvider',
    -        'com.fasterxml.jackson.databind.deser.std.StdDeserializer',
    -        'com.fasterxml.jackson.databind.deser.std.StdScalarDeserializer',
    -        'com.fasterxml.jackson.databind.module.SimpleModule',
    -        'com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter',
    -        'com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider',
    -        'com.fasterxml.jackson.databind.ser.std.StdScalarSerializer',
    -        'com.fasterxml.jackson.databind.ser.std.StdSerializer',
    -        'com.fasterxml.jackson.dataformat.xml.JacksonXmlModule',
    -        'com.fasterxml.jackson.dataformat.xml.XmlMapper',
    -        'com.fasterxml.jackson.dataformat.xml.util.DefaultXmlPrettyPrinter',
    -        'com.fasterxml.jackson.databind.node.JsonNodeFactory',
    -        'com.fasterxml.jackson.databind.node.ObjectNode',
    -        'com.lmax.disruptor.BlockingWaitStrategy',
    -        'com.lmax.disruptor.BusySpinWaitStrategy',
    -        'com.lmax.disruptor.EventFactory',
    -        'com.lmax.disruptor.EventTranslator',
    -        'com.lmax.disruptor.EventTranslatorTwoArg',
    -        'com.lmax.disruptor.EventTranslatorVararg',
    -        'com.lmax.disruptor.ExceptionHandler',
    -        'com.lmax.disruptor.LifecycleAware',
    -        'com.lmax.disruptor.RingBuffer',
    -        'com.lmax.disruptor.Sequence',
    -        'com.lmax.disruptor.SequenceReportingEventHandler',
    -        'com.lmax.disruptor.SleepingWaitStrategy',
    -        'com.lmax.disruptor.TimeoutBlockingWaitStrategy',
    -        'com.lmax.disruptor.WaitStrategy',
    -        'com.lmax.disruptor.YieldingWaitStrategy',
    -        'com.lmax.disruptor.dsl.Disruptor',
    -        'com.lmax.disruptor.dsl.ProducerType',
    -        'javax.jms.Connection',
    -        'javax.jms.ConnectionFactory',
    -        'javax.jms.Destination',
    -        'javax.jms.JMSException',
    -        'javax.jms.MapMessage',
    -        'javax.jms.Message',
    -        'javax.jms.MessageConsumer',
    -        'javax.jms.MessageProducer',
    -        'javax.jms.Session',
    -        'javax.mail.Authenticator',
    -        'javax.mail.Message$RecipientType',
    -        'javax.mail.PasswordAuthentication',
    -        'javax.mail.Session',
    -        'javax.mail.Transport',
    -        'javax.mail.internet.InternetAddress',
    -        'javax.mail.internet.InternetHeaders',
    -        'javax.mail.internet.MimeBodyPart',
    -        'javax.mail.internet.MimeMessage',
    -        'javax.mail.internet.MimeMultipart',
    -        'javax.mail.internet.MimeUtility',
    -        'javax.mail.util.ByteArrayDataSource',
    -        'javax.persistence.AttributeConverter',
    -        'javax.persistence.EntityManager',
    -        'javax.persistence.EntityManagerFactory',
    -        'javax.persistence.EntityTransaction',
    -        'javax.persistence.Persistence',
    -        'javax.persistence.PersistenceException',
    -        'org.apache.commons.compress.compressors.CompressorStreamFactory',
    -        'org.apache.commons.compress.utils.IOUtils',
    -        'org.apache.commons.csv.CSVFormat',
    -        'org.apache.commons.csv.QuoteMode',
    -        'org.apache.kafka.clients.producer.Callback',
    -        'org.apache.kafka.clients.producer.KafkaProducer',
    -        'org.apache.kafka.clients.producer.Producer',
    -        'org.apache.kafka.clients.producer.ProducerRecord',
    -        'org.apache.kafka.clients.producer.RecordMetadata',
    -        'org.codehaus.stax2.XMLStreamWriter2',
    -        'org.jctools.queues.MessagePassingQueue$Consumer',
    -        'org.jctools.queues.MpscArrayQueue',
    -        'org.osgi.framework.AdaptPermission',
    -        'org.osgi.framework.AdminPermission',
    -        'org.osgi.framework.Bundle',
    -        'org.osgi.framework.BundleActivator',
    -        'org.osgi.framework.BundleContext',
    -        'org.osgi.framework.BundleEvent',
    -        'org.osgi.framework.BundleReference',
    -        'org.osgi.framework.FrameworkUtil',
    -        'org.osgi.framework.ServiceRegistration',
    -        'org.osgi.framework.SynchronousBundleListener',
    -        'org.osgi.framework.wiring.BundleWire',
    -        'org.osgi.framework.wiring.BundleWiring',
    -        'org.zeromq.ZMQ$Context',
    -        'org.zeromq.ZMQ$Socket',
    -        'org.zeromq.ZMQ'
    -]
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/AbstractSqlRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/AbstractSqlRequest.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/AbstractSqlRequest.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/AbstractSqlRequest.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ColumnInfo.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/MainResponse.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/MainResponse.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/MainResponse.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/MainResponse.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ProtoUtils.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ProtoUtils.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ProtoUtils.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/ProtoUtils.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorRequest.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorRequest.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorRequest.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorResponse.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorResponse.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorResponse.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlClearCursorResponse.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryResponse.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryResponse.java
    similarity index 97%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryResponse.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryResponse.java
    index f048bcb170a52..4cb500fe8025b 100644
    --- a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryResponse.java
    +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryResponse.java
    @@ -18,7 +18,6 @@
     
     import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
     import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
    -import static org.elasticsearch.xpack.sql.proto.ProtoUtils.parseFieldsValue;
     
     /**
      * Response to perform an sql query for JDBC/CLI client
    @@ -94,7 +93,7 @@ public static List parseRow(XContentParser parser) throws IOException {
             List list = new ArrayList<>();
             while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                 if (parser.currentToken().isValue()) {
    -                list.add(parseFieldsValue(parser));
    +                list.add(ProtoUtils.parseFieldsValue(parser));
                 } else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
                     list.add(null);
                 } else {
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlTypedParamValue.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlTypedParamValue.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlTypedParamValue.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlTypedParamValue.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
    similarity index 93%
    rename from x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
    rename to x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
    index c024af48187d3..3f77bc2fc2ed7 100644
    --- a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
    +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
    @@ -61,11 +61,6 @@ public enum DataType {
          */
         public final JDBCType jdbcType;
     
    -    /**
    -     * Name of corresponding java class
    -     */
    -    public final String javaName;
    -
         /**
          * Size of the type in bytes
          * <p>
    @@ -105,10 +100,12 @@ public enum DataType {
          */
         public final boolean defaultDocValues;
     
    +    private final Class javaClass;
    +
         DataType(JDBCType jdbcType, Class javaClass, int size, int defaultPrecision, int displaySize, boolean isInteger,
                  boolean isRational, boolean defaultDocValues) {
             this.esType = name().toLowerCase(Locale.ROOT);
    -        this.javaName = javaClass == null ? null : javaClass.getName();
    +        this.javaClass = javaClass;
             this.jdbcType = jdbcType;
             this.size = size;
             this.defaultPrecision = defaultPrecision;
    @@ -125,6 +122,10 @@ public enum DataType {
         public String sqlName() {
             return jdbcType.getName();
         }
    +
    +    public Class javaClass() {
    +        return javaClass;
    +    }
     
         public boolean isNumeric() {
             return isInteger || isRational;
    @@ -152,6 +153,13 @@ public static DataType fromJdbcType(JDBCType jdbcType) {
             }
             return jdbcToEs.get(jdbcType);
         }
    +
    +    public static Class fromJdbcTypeToJava(JDBCType jdbcType) {
    +        if (jdbcToEs.containsKey(jdbcType) == false) {
    +            throw new IllegalArgumentException("Unsupported JDBC type [" + jdbcType + "]");
    +        }
    +        return jdbcToEs.get(jdbcType).javaClass();
    +    }
     
         /**
          * Creates returns DataType enum coresponding to the specified es type
    diff --git a/x-pack/plugin/sql/sql-shared-proto/src/test/java/org/elasticsearch/xpack/sql/proto/ProtoUtilsTests.java b/x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/proto/ProtoUtilsTests.java
    similarity index 100%
    rename from x-pack/plugin/sql/sql-shared-proto/src/test/java/org/elasticsearch/xpack/sql/proto/ProtoUtilsTests.java
    rename to x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/proto/ProtoUtilsTests.java
    diff --git a/x-pack/plugin/sql/sql-shared-proto/build.gradle b/x-pack/plugin/sql/sql-shared-proto/build.gradle
    deleted file mode 100644
    index 5a1439f4360bc..0000000000000
    --- a/x-pack/plugin/sql/sql-shared-proto/build.gradle
    +++ /dev/null
    @@ -1,35 +0,0 @@
    -
    -/*
    - * This project contains XContent protocol classes shared between server and http client
    - */
    -
    -import org.elasticsearch.gradle.precommit.PrecommitTasks
    -
    -apply plugin: 'elasticsearch.build'
    -
    -description = 'Request and response objects shared by the cli, jdbc ' +
    -        'and the Elasticsearch plugin'
    -
    -dependencies {
    -    compile (project(':libs:core')) {
    -        transitive = false
    -    }
    -    compile (project(':libs:x-content')) {
    -        transitive = false
    -    }
    -    compile 'joda-time:joda-time:2.9.9'
    -    runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
    -
    -    testCompile "org.elasticsearch.test:framework:${version}"
    -}
    -
    -forbiddenApisMain {
    -    //sql does not depend on server, so only jdk signatures should be checked
    -    signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')]
    -}
    -
    -dependencyLicenses {
    -    mapping from: /elasticsearch-core.*/, to: 'elasticsearch'
    -    mapping from: /jackson-.*/, to: 'jackson'
    -    ignoreSha 'elasticsearch-core'
    -}
    diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java
    index 54d541ab406b7..c17e9634492a1 100644
    --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java
    +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java
    @@ -87,7 +87,7 @@ public Object fold() {
         }
     
         /**
    -     * Check whether the boundaries are invalid ( upper < lower) or not.
    +     * Check whether the boundaries are invalid ( upper &lt; lower) or not.
          * If they do, the value does not have to be evaluate.
          */
         private boolean areBoundariesInvalid() {
    @@ -139,4 +139,4 @@ public String toString() {
             sb.append(upper);
             return sb.toString();
         }
    -}
    \ No newline at end of file
    +}
    diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatterCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatterCursor.java
    index 94422dd2888bf..b226e899e4d09 100644
    --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatterCursor.java
    +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/CliFormatterCursor.java
    @@ -10,6 +10,7 @@
     import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.common.io.stream.StreamInput;
     import org.elasticsearch.common.io.stream.StreamOutput;
    +import org.elasticsearch.xpack.sql.action.CliFormatter;
     import org.elasticsearch.xpack.sql.session.Configuration;
     import org.elasticsearch.xpack.sql.session.Cursor;
     import org.elasticsearch.xpack.sql.session.RowSet;
    diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java
    index 175b78d4f6655..e606eace9759d 100644
    --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java
    +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java
    @@ -12,6 +12,8 @@
     import org.elasticsearch.rest.RestController;
     import org.elasticsearch.rest.RestRequest;
     import org.elasticsearch.rest.action.RestToXContentListener;
    +import org.elasticsearch.xpack.sql.action.SqlClearCursorAction;
    +import org.elasticsearch.xpack.sql.action.SqlClearCursorRequest;
     import org.elasticsearch.xpack.sql.proto.Mode;
     import org.elasticsearch.xpack.sql.proto.Protocol;
     
    diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java
    index a8daa1136d390..387b7d8727ec2 100644
    --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java
    +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java
    @@ -18,6 +18,9 @@
     import org.elasticsearch.rest.RestResponse;
     import org.elasticsearch.rest.RestStatus;
     import org.elasticsearch.rest.action.RestResponseListener;
    +import org.elasticsearch.xpack.sql.action.SqlQueryAction;
    +import org.elasticsearch.xpack.sql.action.SqlQueryRequest;
    +import org.elasticsearch.xpack.sql.action.SqlQueryResponse;
     import org.elasticsearch.xpack.sql.proto.Mode;
     import org.elasticsearch.xpack.sql.proto.Protocol;
     import org.elasticsearch.xpack.sql.session.Cursor;
    diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java
    index 74d94e4800606..731046f923fef 100644
    --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java
    +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java
    @@ -12,6 +12,8 @@
     import org.elasticsearch.rest.RestController;
     import org.elasticsearch.rest.RestRequest;
     import org.elasticsearch.rest.action.RestToXContentListener;
    +import org.elasticsearch.xpack.sql.action.SqlTranslateAction;
    +import org.elasticsearch.xpack.sql.action.SqlTranslateRequest;
     import org.elasticsearch.xpack.sql.proto.Mode;
     
     import java.io.IOException;
    diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java
    index 24bf8f15aa7de..6c026b2607161 100644
    --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java
    +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java
    @@ -31,6 +31,9 @@
     import org.elasticsearch.xpack.core.XPackField;
     import org.elasticsearch.xpack.core.XPackPlugin;
     import org.elasticsearch.xpack.core.XPackSettings;
    +import org.elasticsearch.xpack.sql.action.SqlClearCursorAction;
    +import org.elasticsearch.xpack.sql.action.SqlQueryAction;
    +import org.elasticsearch.xpack.sql.action.SqlTranslateAction;
     import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
     import org.elasticsearch.xpack.sql.execution.PlanExecutor;
     
    diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormat.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormat.java
    index 9d0cd60c23e32..de8798ecf544b 100644
    --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormat.java
    +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormat.java
    @@ -7,6 +7,8 @@
     
     import org.elasticsearch.common.Strings;
     import org.elasticsearch.rest.RestRequest;
    +import org.elasticsearch.xpack.sql.action.SqlQueryResponse;
    +import org.elasticsearch.xpack.sql.action.CliFormatter;
     import org.elasticsearch.xpack.sql.proto.ColumnInfo;
     import org.elasticsearch.xpack.sql.session.Cursor;
     import org.elasticsearch.xpack.sql.session.Cursors;
    diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java
    index 7a216f3a2bf9f..be0aae2001c5a 100644
    --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java
    +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java
    @@ -13,12 +13,14 @@
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.tasks.Task;
     import org.elasticsearch.transport.TransportService;
    +import org.elasticsearch.xpack.sql.action.SqlClearCursorRequest;
    +import org.elasticsearch.xpack.sql.action.SqlClearCursorResponse;
     import org.elasticsearch.xpack.sql.execution.PlanExecutor;
     import org.elasticsearch.xpack.sql.session.Configuration;
     import org.elasticsearch.xpack.sql.session.Cursor;
     import org.elasticsearch.xpack.sql.session.Cursors;
     
    -import static org.elasticsearch.xpack.sql.plugin.SqlClearCursorAction.NAME;
    +import static org.elasticsearch.xpack.sql.action.SqlClearCursorAction.NAME;
     
     public class TransportSqlClearCursorAction extends HandledTransportAction {
         private final PlanExecutor planExecutor;
    diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java
    index 7993f00d71aee..e491f76749bdc 100644
    --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java
    +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java
    @@ -15,6 +15,9 @@
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.tasks.Task;
     import org.elasticsearch.transport.TransportService;
    +import org.elasticsearch.xpack.sql.action.SqlQueryAction;
    +import org.elasticsearch.xpack.sql.action.SqlQueryRequest;
    +import org.elasticsearch.xpack.sql.action.SqlQueryResponse;
     import org.elasticsearch.xpack.sql.execution.PlanExecutor;
     import org.elasticsearch.xpack.sql.proto.ColumnInfo;
     import org.elasticsearch.xpack.sql.session.Configuration;
    diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java
    index 4ef7c14ab01f3..95a10497fdc54 100644
    --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java
    +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java
    @@ -13,6 +13,9 @@
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.tasks.Task;
     import org.elasticsearch.transport.TransportService;
    +import org.elasticsearch.xpack.sql.action.SqlTranslateAction;
    +import org.elasticsearch.xpack.sql.action.SqlTranslateRequest;
    +import org.elasticsearch.xpack.sql.action.SqlTranslateResponse;
     import org.elasticsearch.xpack.sql.execution.PlanExecutor;
     import org.elasticsearch.xpack.sql.session.Configuration;
     
    diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CliFormatterTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/CliFormatterTests.java
    similarity index 95%
    rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CliFormatterTests.java
    rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/CliFormatterTests.java
    index d87dba3306889..400a90712d6cf 100644
    --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CliFormatterTests.java
    +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/CliFormatterTests.java
    @@ -3,9 +3,11 @@
      * or more contributor license agreements. Licensed under the Elastic License;
      * you may not use this file except in compliance with the Elastic License.
      */
    -package org.elasticsearch.xpack.sql.plugin;
    +package org.elasticsearch.xpack.sql.action;
     
     import org.elasticsearch.test.ESTestCase;
    +import org.elasticsearch.xpack.sql.action.CliFormatter;
    +import org.elasticsearch.xpack.sql.action.SqlQueryResponse;
     import org.elasticsearch.xpack.sql.proto.ColumnInfo;
     
     import java.sql.JDBCType;
    diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java
    index ef90e8ccd470e..cbdf0e1ac9811 100644
    --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java
    +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java
    @@ -7,10 +7,7 @@
     
     import org.elasticsearch.action.index.IndexRequest;
     import org.elasticsearch.action.support.WriteRequest;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryRequestBuilder;
     import org.elasticsearch.xpack.sql.proto.ColumnInfo;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryAction;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse;
     import org.elasticsearch.xpack.sql.proto.Mode;
     
     import java.sql.JDBCType;
    diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java
    index 643372a21fe0a..de55d486555ad 100644
    --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java
    +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java
    @@ -8,12 +8,6 @@
     import org.elasticsearch.action.bulk.BulkRequestBuilder;
     import org.elasticsearch.action.index.IndexRequest;
     import org.elasticsearch.action.support.WriteRequest;
    -import org.elasticsearch.xpack.sql.plugin.SqlClearCursorAction;
    -import org.elasticsearch.xpack.sql.plugin.SqlClearCursorRequestBuilder;
    -import org.elasticsearch.xpack.sql.plugin.SqlClearCursorResponse;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryAction;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryRequestBuilder;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse;
     import org.elasticsearch.xpack.sql.session.Cursor;
     
     import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
    diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlDisabledIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlDisabledIT.java
    index 395a2b673f946..c50f1095164df 100644
    --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlDisabledIT.java
    +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlDisabledIT.java
    @@ -7,8 +7,6 @@
     
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.xpack.core.XPackSettings;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryAction;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryRequestBuilder;
     
     import static org.hamcrest.CoreMatchers.either;
     import static org.hamcrest.CoreMatchers.startsWith;
    diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java
    index a755d2b4f59c1..df0875690a2c2 100644
    --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java
    +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java
    @@ -21,12 +21,6 @@
     import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
     import org.elasticsearch.transport.Netty4Plugin;
     import org.elasticsearch.transport.nio.NioTransportPlugin;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryAction;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryRequestBuilder;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse;
    -import org.elasticsearch.xpack.sql.plugin.SqlTranslateAction;
    -import org.elasticsearch.xpack.sql.plugin.SqlTranslateRequestBuilder;
    -import org.elasticsearch.xpack.sql.plugin.SqlTranslateResponse;
     import org.hamcrest.Matchers;
     import org.junit.Before;
     
    diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java
    index 2ef7c084d110d..e413a590dfaf1 100644
    --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java
    +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java
    @@ -11,9 +11,6 @@
     import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
     import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
     import org.elasticsearch.search.sort.SortBuilders;
    -import org.elasticsearch.xpack.sql.plugin.SqlTranslateAction;
    -import org.elasticsearch.xpack.sql.plugin.SqlTranslateRequestBuilder;
    -import org.elasticsearch.xpack.sql.plugin.SqlTranslateResponse;
     
     import static java.util.Collections.singletonList;
     import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
    diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java
    index bac221df2e92d..7eb4e5402cd36 100644
    --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java
    +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CursorTests.java
    @@ -12,10 +12,10 @@
     import org.elasticsearch.client.Client;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.xpack.sql.SqlException;
    -import org.elasticsearch.xpack.sql.plugin.CliFormatter;
    +import org.elasticsearch.xpack.sql.action.CliFormatter;
     import org.elasticsearch.xpack.sql.plugin.CliFormatterCursor;
     import org.elasticsearch.xpack.sql.proto.ColumnInfo;
    -import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse;
    +import org.elasticsearch.xpack.sql.action.SqlQueryResponse;
     import org.elasticsearch.xpack.sql.session.Configuration;
     import org.elasticsearch.xpack.sql.session.Cursor;
     import org.elasticsearch.xpack.sql.session.Cursors;
    diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java
    index bf6ccbb225a54..7a0f6dbca9d13 100644
    --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java
    +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/TextFormatTests.java
    @@ -10,6 +10,7 @@
     import org.elasticsearch.rest.RestRequest;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.test.rest.FakeRestRequest;
    +import org.elasticsearch.xpack.sql.action.SqlQueryResponse;
     import org.elasticsearch.xpack.sql.proto.ColumnInfo;
     
     import java.util.ArrayList;
    diff --git a/x-pack/plugin/upgrade/build.gradle b/x-pack/plugin/upgrade/build.gradle
    index 5cead96ac7aa5..8e65f87da3070 100644
    --- a/x-pack/plugin/upgrade/build.gradle
    +++ b/x-pack/plugin/upgrade/build.gradle
    @@ -35,7 +35,7 @@ task internalClusterTest(type: RandomizedTestingTask,
             dependsOn: test.dependsOn) {
       configure(BuildPlugin.commonTestConfig(project))
       classpath = project.test.classpath
    -  testClassesDir = project.test.testClassesDir
    +  testClassesDirs = project.test.testClassesDirs
       include '**/*IT.class'
       systemProperty 'es.set.netty.runtime.available.processors', 'false'
     }
    diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle
    index b2bb7a63f6fb4..78ac1436fd8bc 100644
    --- a/x-pack/qa/full-cluster-restart/build.gradle
    +++ b/x-pack/qa/full-cluster-restart/build.gradle
    @@ -184,6 +184,7 @@ subprojects {
         systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
         systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo")
         exclude 'org/elasticsearch/upgrades/FullClusterRestartIT.class'
    +    exclude 'org/elasticsearch/upgrades/QueryBuilderBWCIT.class'
       }
     
       Task upgradedClusterTest = tasks.create(name: "${baseName}#upgradedClusterTest", type: RestIntegTestTask)
    @@ -222,6 +223,7 @@ subprojects {
         systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
         systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo")
         exclude 'org/elasticsearch/upgrades/FullClusterRestartIT.class'
    +    exclude 'org/elasticsearch/upgrades/QueryBuilderBWCIT.class'
       }
     
       Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") {
    diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/QueryBuilderBWCIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/QueryBuilderBWCIT.java
    new file mode 100644
    index 0000000000000..1c4cba6aa8caa
    --- /dev/null
    +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/QueryBuilderBWCIT.java
    @@ -0,0 +1,23 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License;
    + * you may not use this file except in compliance with the Elastic License.
    + */
    +package org.elasticsearch.xpack.restart;
    +
    +import org.elasticsearch.common.settings.Settings;
    +import org.elasticsearch.common.util.concurrent.ThreadContext;
    +
    +import java.nio.charset.StandardCharsets;
    +import java.util.Base64;
    +
    +public class QueryBuilderBWCIT extends org.elasticsearch.upgrades.QueryBuilderBWCIT {
    +
    +    @Override
    +    protected Settings restClientSettings() {
    +        String token = "Basic " + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8));
    +        return Settings.builder()
    +                .put(ThreadContext.PREFIX + ".Authorization", token)
    +                .build();
    +    }
    +}
    diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexAuditUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexAuditUpgradeIT.java
    index 7c81a7141a991..92a8c3d2f830f 100644
    --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexAuditUpgradeIT.java
    +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexAuditUpgradeIT.java
    @@ -87,7 +87,6 @@ private void assertNumUniqueNodeNameBuckets(int numBuckets) throws Exception {
     
         /**
          * Has the master been upgraded to the new version?
    -     * @throws IOException
          */
         private boolean masterIsNewVersion() throws IOException {
             Map map = entityAsMap(client().performRequest("GET", "/_nodes/_master"));
    diff --git a/x-pack/qa/sql/build.gradle b/x-pack/qa/sql/build.gradle
    index 0bea3a9364b71..4d061fffa9110 100644
    --- a/x-pack/qa/sql/build.gradle
    +++ b/x-pack/qa/sql/build.gradle
    @@ -89,7 +89,7 @@ subprojects {
         testRuntime "net.sourceforge.csvjdbc:csvjdbc:1.0.34"
         testRuntime "com.h2database:h2:1.4.197"
         testRuntime project(path: xpackModule('sql:jdbc'), configuration: 'nodeps')
    -    testRuntime xpackProject('plugin:sql:sql-shared-client')
    +    testRuntime xpackProject('plugin:sql:sql-client')
     
         // TODO check if needed
     
    @@ -99,7 +99,7 @@ subprojects {
     
         // CLI testing dependencies
         testRuntime project(path: xpackModule('sql:sql-cli'), configuration: 'nodeps')
    -    testRuntime (xpackProject('plugin:sql:sql-proto')) {
    +    testRuntime (xpackProject('plugin:sql:sql-action')) {
             transitive = false
         }
         testRuntime "org.jline:jline:3.6.0"