diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle
index e7ee5a059ab37..80d1982300dd1 100644
--- a/benchmarks/build.gradle
+++ b/benchmarks/build.gradle
@@ -17,17 +17,6 @@
  * under the License.
  */
 
-buildscript {
-    repositories {
-        maven {
-            url 'https://plugins.gradle.org/m2/'
-        }
-    }
-    dependencies {
-        classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
-    }
-}
-
 apply plugin: 'elasticsearch.build'
 
 // order of this section matters, see: https://github.com/johnrengelman/shadow/issues/336
@@ -81,10 +70,6 @@ thirdPartyAudit.excludes = [
     'org.openjdk.jmh.util.Utils'
 ]
 
-shadowJar {
-    classifier = 'benchmarks'
-}
-
 runShadow {
     executable = new File(project.runtimeJavaHome, 'bin/java')
 }
diff --git a/build.gradle b/build.gradle
index a75d093664fe6..66f34d8f445de 100644
--- a/build.gradle
+++ b/build.gradle
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-
+import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
 import org.apache.tools.ant.taskdefs.condition.Os
 import org.apache.tools.ant.filters.ReplaceTokens
 import org.elasticsearch.gradle.BuildPlugin
@@ -222,7 +222,7 @@ subprojects {
     "org.elasticsearch.gradle:build-tools:${version}": ':build-tools',
     "org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec',
     "org.elasticsearch:elasticsearch:${version}": ':server',
-    "org.elasticsearch:elasticsearch-cli:${version}": ':libs:cli',
+    "org.elasticsearch:elasticsearch-cli:${version}": ':libs:elasticsearch-cli',
     "org.elasticsearch:elasticsearch-core:${version}": ':libs:core',
     "org.elasticsearch:elasticsearch-nio:${version}": ':libs:nio',
     "org.elasticsearch:elasticsearch-x-content:${version}": ':libs:x-content',
@@ -303,18 +303,55 @@ subprojects {
     if (project.plugins.hasPlugin(BuildPlugin)) {
         String artifactsHost = VersionProperties.elasticsearch.isSnapshot() ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
         Closure sortClosure = { a, b -> b.group <=> a.group }
-        Closure depJavadocClosure = { dep ->
-            if (dep.group != null && dep.group.startsWith('org.elasticsearch')) {
-                Project upstreamProject = dependencyToProject(dep)
-                if (upstreamProject != null) {
-                    project.javadoc.dependsOn "${upstreamProject.path}:javadoc"
-                    String artifactPath = dep.group.replaceAll('\\.', '/') + '/' + dep.name.replaceAll('\\.', '/') + '/' + dep.version
-                    project.javadoc.options.linksOffline artifactsHost + "/javadoc/" + artifactPath, "${upstreamProject.buildDir}/docs/javadoc/"
+        Closure depJavadocClosure = { shadowed, dep ->
+            if (dep.group == null || false == dep.group.startsWith('org.elasticsearch')) {
+                return
+            }
+            Project upstreamProject = dependencyToProject(dep)
+            if (upstreamProject == null) {
+                return
+            }
+            if (shadowed) {
+                /*
+                 * Include the source of shadowed upstream projects so we don't
+                 * have to publish their javadoc.
+                 */
+                project.evaluationDependsOn(upstreamProject.path)
+                project.javadoc.source += upstreamProject.javadoc.source
+                /*
+                 * Do not add those projects to the javadoc classpath because
+                 * we are going to resolve them with their source instead.
+                 */
+                project.javadoc.classpath = project.javadoc.classpath.filter { f ->
+                    false == upstreamProject.configurations.archives.artifacts.files.files.contains(f)
                 }
+                /*
+                 * Instead we need the upstream project's javadoc classpath so
+                 * we don't barf on the classes that it references.
+                 */
+                project.javadoc.classpath += upstreamProject.javadoc.classpath
+            } else {
+                // Link to non-shadowed dependent projects
+                project.javadoc.dependsOn "${upstreamProject.path}:javadoc"
+                String artifactPath = dep.group.replaceAll('\\.', '/') + '/' + dep.name.replaceAll('\\.', '/') + '/' + dep.version
+                project.javadoc.options.linksOffline artifactsHost + "/javadoc/" + artifactPath, "${upstreamProject.buildDir}/docs/javadoc/"
             }
         }
-        project.configurations.compile.dependencies.findAll().toSorted(sortClosure).each(depJavadocClosure)
-        project.configurations.compileOnly.dependencies.findAll().toSorted(sortClosure).each(depJavadocClosure)
+        boolean hasShadow = project.plugins.hasPlugin(ShadowPlugin)
+        project.configurations.compile.dependencies
+            .findAll()
+            .toSorted(sortClosure)
+            .each({ c -> depJavadocClosure(hasShadow, c) })
+        project.configurations.compileOnly.dependencies
+            .findAll()
+            .toSorted(sortClosure)
+            .each({ c -> depJavadocClosure(hasShadow, c) })
+        if (hasShadow) {
+            project.configurations.shadow.dependencies
+                .findAll()
+                .toSorted(sortClosure)
+                .each({ c -> depJavadocClosure(false, c) })
+        }
     }
   }
 }
@@ -537,6 +574,7 @@ subprojects { project ->
             commandLine "${->new File(rootProject.compilerJavaHome, 'bin/jar')}", 'xf', "${-> jarTask.outputs.files.singleFile}", 'META-INF/LICENSE.txt', 'META-INF/NOTICE.txt'
             workingDir destination
+            onlyIf {jarTask.enabled}
             doFirst {
                 project.delete(destination)
                 Files.createDirectories(destination)
@@ -545,6 +583,7 @@ subprojects { project ->
         final Task checkNotice = project.task("verify${jarTask.name.capitalize()}Notice") {
             dependsOn extract
+            onlyIf {jarTask.enabled}
             doLast {
                 final List<String> noticeLines = Files.readAllLines(project.noticeFile.toPath())
                 final Path noticePath = extract.destination.resolve('META-INF/NOTICE.txt')
@@ -555,6 +594,7 @@ subprojects { project ->
         final Task checkLicense = project.task("verify${jarTask.name.capitalize()}License") {
             dependsOn extract
+            onlyIf {jarTask.enabled}
             doLast {
                 final List<String> licenseLines = Files.readAllLines(project.licenseFile.toPath())
                 final Path licensePath = extract.destination.resolve('META-INF/LICENSE.txt')
@@ -582,6 +622,21 @@ gradle.projectsEvaluated {
             }
         }
     }
+    // Having the same group and name for distinct projects causes Gradle to consider them equal when resolving
+    // dependencies leading to hard to debug failures. Run a check across all projects to prevent this from happening.
+    // see: https://github.com/gradle/gradle/issues/847
+    Map<String, Project> coordsToProject = [:]
+    project.allprojects.forEach { p ->
+        String coords = "${p.group}:${p.name}"
+        if (coordsToProject.putIfAbsent(coords, p) != null) {
+            throw new GradleException(
+                "Detected that two projects: ${p.path} and ${coordsToProject[coords].path} " +
+                "have the same name and group: ${coords}. " +
+                "This doesn't currently work correctly in Gradle, see: " +
+                "https://github.com/gradle/gradle/issues/847"
+            )
+        }
+    }
 }
 
 if (System.properties.get("build.compare") != null) {
@@ -596,7 +651,7 @@ if (System.properties.get("build.compare") != null) {
         }
     }
     sourceBuild {
-        gradleVersion = "4.8.1" // does not default to gradle weapper of project dir, but current version
+        gradleVersion = gradle.getGradleVersion()
        projectDir = referenceProject
        tasks = ["clean", "assemble"]
        arguments = ["-Dbuild.compare_friendly=true"]
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index 3d100daf7d65f..eb95ff148f63c 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -104,6 +104,7 @@ dependencies {
   compile 'de.thetaphi:forbiddenapis:2.5'
   compile 'org.apache.rat:apache-rat:0.11'
   compile "org.elasticsearch:jna:4.5.1"
+  compile 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
   testCompile "junit:junit:${props.getProperty('junit')}"
 }
diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy
index 24b66efbcef2c..d4c8f89bf50cf 100644
--- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy
+++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy
@@ -74,7 +74,7 @@ class RandomizedTestingPlugin implements Plugin<Project> {
         // since we can't be sure if the task was ever realized, we remove both the provider and the task
         TaskProvider<Test> oldTestProvider
         try {
-            oldTestProvider = tasks.getByNameLater(Test, 'test')
+            oldTestProvider = tasks.named('test')
         } catch (UnknownTaskException unused) {
             // no test task, ok, user will use testing task on their own
             return
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 89e10c50ff782..c5dd19de3cc5a 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -19,6 +19,7 @@
 package org.elasticsearch.gradle
 
 import com.carrotsearch.gradle.junit4.RandomizedTestingTask
+import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
 import org.apache.tools.ant.taskdefs.condition.Os
 import org.eclipse.jgit.lib.Constants
 import org.eclipse.jgit.lib.RepositoryBuilder
@@ -36,12 +37,14 @@ import org.gradle.api.artifacts.ModuleDependency
 import org.gradle.api.artifacts.ModuleVersionIdentifier
 import org.gradle.api.artifacts.ProjectDependency
 import org.gradle.api.artifacts.ResolvedArtifact
+import org.gradle.api.artifacts.SelfResolvingDependency
 import org.gradle.api.artifacts.dsl.RepositoryHandler
 import org.gradle.api.execution.TaskExecutionGraph
 import org.gradle.api.plugins.JavaPlugin
 import org.gradle.api.publish.maven.MavenPublication
 import org.gradle.api.publish.maven.plugins.MavenPublishPlugin
 import org.gradle.api.publish.maven.tasks.GenerateMavenPom
+import org.gradle.api.tasks.SourceSet
 import org.gradle.api.tasks.bundling.Jar
 import org.gradle.api.tasks.compile.GroovyCompile
 import org.gradle.api.tasks.compile.JavaCompile
@@ -498,7 +501,41 @@ class BuildPlugin implements Plugin<Project> {
                 }
             }
         }
+        project.plugins.withType(ShadowPlugin).whenPluginAdded {
+            project.publishing {
+                publications {
+                    nebula(MavenPublication) {
+                        artifact project.tasks.shadowJar
+                        artifactId = project.archivesBaseName
+                        /*
+                         * Configure the pom to include the "shadow" as compile dependencies
+                         * because that is how we're using them but remove all other dependencies
+                         * because they've been shaded into the jar.
+                         */
+                        pom.withXml { XmlProvider xml ->
+                            Node root = xml.asNode()
+                            root.remove(root.dependencies)
+                            Node dependenciesNode = root.appendNode('dependencies')
+                            project.configurations.shadow.allDependencies.each {
+                                if (false == it instanceof SelfResolvingDependency) {
+                                    Node dependencyNode = dependenciesNode.appendNode('dependency')
+                                    dependencyNode.appendNode('groupId', it.group)
+                                    dependencyNode.appendNode('artifactId', it.name)
+                                    dependencyNode.appendNode('version', it.version)
+                                    dependencyNode.appendNode('scope', 'compile')
+                                }
+                            }
+                            // Be tidy and remove the element if it is empty
+                            if (dependenciesNode.children.empty) {
+                                root.remove(dependenciesNode)
+                            }
+                        }
+                    }
+                }
+            }
+        }
     }
+
 }
 
 /** Adds compiler settings to the project */
@@ -660,6 +697,28 @@ class BuildPlugin implements Plugin<Project> {
                 }
             }
         }
+        project.plugins.withType(ShadowPlugin).whenPluginAdded {
+            /*
+             * When we use the shadow plugin we entirely replace the
+             * normal jar with the shadow jar so we no longer want to run
+             * the jar task.
+             */
+            project.tasks.jar.enabled = false
+            project.tasks.shadowJar {
+                /*
+                 * Replace the default "shadow" classifier with null
+                 * which will leave the classifier off of the file name.
+                 */
+                classifier = null
+                /*
+                 * Not all cases need service files merged but it is
+                 * better to be safe
+                 */
+                mergeServiceFiles()
+            }
+            // Make sure we assemble the shadow jar
+            project.tasks.assemble.dependsOn project.tasks.shadowJar
+        }
     }
 
     /** Returns a closure of common configuration shared by unit and integration tests. */
@@ -691,7 +750,6 @@ class BuildPlugin implements Plugin<Project> {
             systemProperty 'tests.task', path
             systemProperty 'tests.security.manager', 'true'
             systemProperty 'jna.nosys', 'true'
-            systemProperty 'es.scripting.exception_for_missing_value', 'true'
             // TODO: remove setting logging level via system property
             systemProperty 'tests.logger.level', 'WARN'
             for (Map.Entry property : System.properties.entrySet()) {
@@ -744,6 +802,18 @@
            }
 
            exclude '**/*$*.class'
+
+           project.plugins.withType(ShadowPlugin).whenPluginAdded {
+               /*
+                * If we make a shaded jar we test against it.
+                */
+               classpath -= project.tasks.compileJava.outputs.files
+               classpath -= project.configurations.compile
+               classpath -= project.configurations.runtime
+               classpath += project.configurations.shadow
+               classpath += project.tasks.shadowJar.outputs.files
+               dependsOn project.tasks.shadowJar
+           }
        }
    }
 
@@ -764,9 +834,28 @@ class BuildPlugin implements Plugin<Project> {
            additionalTest.configure(commonTestConfig(project))
            additionalTest.configure(config)
            additionalTest.dependsOn(project.tasks.testClasses)
-           test.dependsOn(additionalTest)
+           project.check.dependsOn(additionalTest)
        });
-       return test
+
+       project.plugins.withType(ShadowPlugin).whenPluginAdded {
+           /*
+            * We need somewhere to configure dependencies that we don't wish
+            * to shade into the jar. The shadow plugin creates a "shadow"
+            * configuration which is *almost* exactly that. It is never
+            * bundled into the shaded jar but is used for main source
+            * compilation. Unfortunately, by default it is not used for
+            * *test* source compilation and isn't used in tests at all. This
+            * change makes it available for test compilation.
+            *
+            * Note that this isn't going to work properly with qa projects
+            * but they have no business applying the shadow plugin in the
+            * first place.
+ */ + SourceSet testSourceSet = project.sourceSets.findByName('test') + if (testSourceSet != null) { + testSourceSet.compileClasspath += project.configurations.shadow + } + } } private static configurePrecommit(Project project) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index eb4da8d1f314c..7f6f337e8a906 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -18,11 +18,13 @@ */ package org.elasticsearch.gradle.plugin +import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import nebula.plugin.info.scm.ScmInfoPlugin import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.NoticeTask import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask +import org.gradle.api.InvalidUserDataException import org.gradle.api.JavaVersion import org.gradle.api.Project import org.gradle.api.Task @@ -46,6 +48,18 @@ public class PluginBuildPlugin extends BuildPlugin { @Override public void apply(Project project) { super.apply(project) + project.plugins.withType(ShadowPlugin).whenPluginAdded { + /* + * We've not tested these plugins together and we're fairly sure + * they aren't going to work properly as is *and* we're not really + * sure *why* you'd want to shade stuff in plugins. So we throw an + * exception here to make you come and read this comment. If you + * have a need for shadow while building plugins then know that you + * are probably going to have to fight with gradle for a while.... + */ + throw new InvalidUserDataException('elasticsearch.esplugin is not ' + + 'compatible with com.github.johnrengelman.shadow'); + } configureDependencies(project) // this afterEvaluate must happen before the afterEvaluate added by integTest creation, // so that the file name resolution for installing the plugin will be setup @@ -61,10 +75,10 @@ public class PluginBuildPlugin extends BuildPlugin { // and generate a different pom for the zip addClientJarPomGeneration(project) addClientJarTask(project) - } else { - // no client plugin, so use the pom file from nebula, without jar, for the zip - project.ext.set("nebulaPublish.maven.jar", false) } + // while the jar isn't normally published, we still at least build a pom of deps + // in case it is published, for instance when other plugins extend this plugin + configureJarPom(project) project.integTestCluster.dependsOn(project.bundlePlugin) project.tasks.run.dependsOn(project.bundlePlugin) @@ -80,7 +94,6 @@ public class PluginBuildPlugin extends BuildPlugin { } if (isModule == false || isXPackModule) { - addZipPomGeneration(project) addNoticeGeneration(project) } @@ -225,36 +238,15 @@ public class PluginBuildPlugin extends BuildPlugin { } } - /** Adds a task to generate a pom file for the zip distribution. */ - public static void addZipPomGeneration(Project project) { + /** Configure the pom for the main jar of this plugin */ + protected static void configureJarPom(Project project) { project.plugins.apply(ScmInfoPlugin.class) project.plugins.apply(MavenPublishPlugin.class) project.publishing { publications { - zip(MavenPublication) { - artifact project.bundlePlugin - } - /* HUGE HACK: the underlying maven publication library refuses to deploy any attached artifacts - * when the packaging type is set to 'pom'. 
But Sonatype's OSS repositories require source files - * for artifacts that are of type 'zip'. We already publish the source and javadoc for Elasticsearch - * under the various other subprojects. So here we create another publication using the same - * name that has the "real" pom, and rely on the fact that gradle will execute the publish tasks - * in alphabetical order. This lets us publish the zip file and even though the pom says the - * type is 'pom' instead of 'zip'. We cannot setup a dependency between the tasks because the - * publishing tasks are created *extremely* late in the configuration phase, so that we cannot get - * ahold of the actual task. Furthermore, this entire hack only exists so we can make publishing to - * maven local work, since we publish to maven central externally. */ - zipReal(MavenPublication) { - artifactId = project.pluginProperties.extension.name - pom.withXml { XmlProvider xml -> - Node root = xml.asNode() - root.appendNode('name', project.pluginProperties.extension.name) - root.appendNode('description', project.pluginProperties.extension.description) - root.appendNode('url', urlFromOrigin(project.scminfo.origin)) - Node scmNode = root.appendNode('scm') - scmNode.appendNode('url', project.scminfo.origin) - } + nebula(MavenPublication) { + artifactId project.pluginProperties.extension.name } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index 5c363ac043aff..b29bb7a8cd3b7 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -137,7 +137,12 @@ class ClusterConfiguration { this.project = project } - Map systemProperties = new HashMap<>() + // **Note** for systemProperties, settings, keystoreFiles etc: + // value could be a GString that is evaluated to just a String + // there are cases when value depends on task that is not executed yet on configuration stage + Map systemProperties = new HashMap<>() + + Map environmentVariables = new HashMap<>() Map settings = new HashMap<>() @@ -157,10 +162,15 @@ class ClusterConfiguration { List dependencies = new ArrayList<>() @Input - void systemProperty(String property, String value) { + void systemProperty(String property, Object value) { systemProperties.put(property, value) } + @Input + void environment(String variable, Object value) { + environmentVariables.put(variable, value) + } + @Input void setting(String name, Object value) { settings.put(name, value) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 0349130076cfc..4ede349b206d6 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -609,7 +609,6 @@ class ClusterFormationTasks { /** Adds a task to start an elasticsearch node with the given configuration */ static Task configureStartTask(String name, Project project, Task setup, NodeInfo node) { - // this closure is converted into ant nodes by groovy's AntBuilder Closure antRunner = { AntBuilder ant -> ant.exec(executable: node.executable, spawn: node.config.daemonize, dir: node.cwd, taskname: 'elasticsearch') { @@ -630,13 +629,6 @@ class ClusterFormationTasks { 
                node.writeWrapperScript()
            }
 
-           // we must add debug options inside the closure so the config is read at execution time, as
-           // gradle task options are not processed until the end of the configuration phase
-           if (node.config.debug) {
-               println 'Running elasticsearch in debug mode, suspending until connected on port 8000'
-               node.env['ES_JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000'
-           }
-
            node.getCommandString().eachLine { line -> logger.info(line) }
 
            if (logger.isInfoEnabled() || node.config.daemonize == false) {
@@ -654,6 +646,27 @@
        }
        start.doLast(elasticsearchRunner)
        start.doFirst {
+           // Configure ES JAVA OPTS - adds system properties, assertion flags, remote debug etc
+           List<String> esJavaOpts = [node.env.get('ES_JAVA_OPTS', '')]
+           String collectedSystemProperties = node.config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
+           esJavaOpts.add(collectedSystemProperties)
+           esJavaOpts.add(node.config.jvmArgs)
+           if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) {
+               // put the enable assertions options before other options to allow
+               // flexibility to disable assertions for specific packages or classes
+               // in the cluster-specific options
+               esJavaOpts.add("-ea")
+               esJavaOpts.add("-esa")
+           }
+           // we must add debug options inside the closure so the config is read at execution time, as
+           // gradle task options are not processed until the end of the configuration phase
+           if (node.config.debug) {
+               println 'Running elasticsearch in debug mode, suspending until connected on port 8000'
+               esJavaOpts.add('-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000')
+           }
+           node.env['ES_JAVA_OPTS'] = esJavaOpts.join(" ")
+
            // project.logger.info("Starting node in ${node.clusterName} distribution: ${node.config.distribution}")
        }
        return start
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy
index 5e67dfa55cfd4..0dd56b863324f 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy
@@ -180,15 +180,8 @@ class NodeInfo {
        }
 
        args.addAll("-E", "node.portsfile=true")
-       String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
-       String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs
-       if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) {
-           // put the enable assertions options before other options to allow
-           // flexibility to disable assertions for specific packages or classes
-           // in the cluster-specific options
-           esJavaOpts = String.join(" ", "-ea", "-esa", esJavaOpts)
-       }
-       env = ['ES_JAVA_OPTS': esJavaOpts]
+       env = [:]
+       env.putAll(config.environmentVariables)
        for (Map.Entry property : System.properties.entrySet()) {
            if (property.key.startsWith('tests.es.')) {
                args.add("-E")
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
index f2e6dc8e56186..d2101c48aabdc 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
@@ -24,7 +24,6 @@ import org.elasticsearch.gradle.VersionProperties
 import org.gradle.api.DefaultTask
 import org.gradle.api.Project
 import org.gradle.api.Task
-import org.gradle.api.Transformer
 import org.gradle.api.execution.TaskExecutionAdapter
 import org.gradle.api.internal.tasks.options.Option
 import org.gradle.api.provider.Property
@@ -217,7 +216,7 @@ public class RestIntegTestTask extends DefaultTask {
     * @param project The project to add the copy task to
     * @param includePackagedTests true if the packaged tests should be copied, false otherwise
     */
-   private static Task createCopyRestSpecTask(Project project, Provider<Boolean> includePackagedTests) {
+   static Task createCopyRestSpecTask(Project project, Provider<Boolean> includePackagedTests) {
        project.configurations {
            restSpec
        }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
index d4d1d857e90d4..de3c0dfc3285f 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
@@ -526,7 +526,11 @@ class VagrantTestPlugin implements Plugin<Project> {
                project.gradle.removeListener(batsPackagingReproListener)
            }
            if (project.extensions.esvagrant.boxes.contains(box)) {
-               packagingTest.dependsOn(batsPackagingTest)
+               // these tests are temporarily disabled for suse boxes while we debug an issue
+               // https://github.com/elastic/elasticsearch/issues/30295
+               if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) {
+                   packagingTest.dependsOn(batsPackagingTest)
+               }
            }
        }
 
@@ -565,7 +569,11 @@ class VagrantTestPlugin implements Plugin<Project> {
                project.gradle.removeListener(javaPackagingReproListener)
            }
            if (project.extensions.esvagrant.boxes.contains(box)) {
-               packagingTest.dependsOn(javaPackagingTest)
+               // these tests are temporarily disabled for suse boxes while we debug an issue
+               // https://github.com/elastic/elasticsearch/issues/30295
+               if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) {
+                   packagingTest.dependsOn(javaPackagingTest)
+               }
            }
 
            /*
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java
index cfbb75456bc6c..297586e9ac6f3 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java
@@ -16,6 +16,8 @@
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.net.URISyntaxException;
+import java.net.URL;
 import java.util.Objects;
 
 /**
@@ -30,16 +32,25 @@ public NamingConventionsTask() {
        final Project project = getProject();
 
        SourceSetContainer sourceSets = getJavaSourceSets();
-       final FileCollection classpath = project.files(
-               // This works because the class only depends on one class from junit that will be available from the
-               // tests compile classpath. It's the most straight forward way of telling Java where to find the main
-               // class.
-               NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation().getPath(),
-               // the tests to be loaded
-               checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : project.files(),
-               sourceSets.getByName("test").getCompileClasspath(),
-               sourceSets.getByName("test").getOutput()
-       );
+       final FileCollection classpath;
+       try {
+           URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation();
+           if (location.getProtocol().equals("file") == false) {
+               throw new GradleException("Unexpected location for NamingConventionsCheck class: " + location);
+           }
+           classpath = project.files(
+                   // This works because the class only depends on one class from junit that will be available from the
+                   // tests compile classpath. It's the most straight forward way of telling Java where to find the main
+                   // class.
+                   location.toURI().getPath(),
+                   // the tests to be loaded
+                   checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : project.files(),
+                   sourceSets.getByName("test").getCompileClasspath(),
+                   sourceSets.getByName("test").getOutput()
+           );
+       } catch (URISyntaxException e) {
+           throw new AssertionError(e);
+       }
 
        dependsOn(project.getTasks().matching(it -> "testCompileClasspath".equals(it.getName())));
        getInputs().files(classpath);
@@ -111,10 +122,6 @@ public void setSuccessMarker(File successMarker) {
        this.successMarker = successMarker;
    }
 
-   public boolean getSkipIntegTestInDisguise() {
-       return skipIntegTestInDisguise;
-   }
-
    public boolean isSkipIntegTestInDisguise() {
        return skipIntegTestInDisguise;
    }
diff --git a/client/benchmark/build.gradle b/client/benchmark/build.gradle
index 77867f5e273f2..0c3238d985346 100644
--- a/client/benchmark/build.gradle
+++ b/client/benchmark/build.gradle
@@ -17,18 +17,6 @@
  * under the License.
  */
 
-buildscript {
-    repositories {
-        maven {
-            url 'https://plugins.gradle.org/m2/'
-        }
-    }
-    dependencies {
-        classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
-    }
-}
-
-
 apply plugin: 'elasticsearch.build'
 // build an uberjar with all benchmarks
 apply plugin: 'com.github.johnrengelman.shadow'
diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle
index 451452759f507..65c5d094c7170 100644
--- a/client/rest-high-level/build.gradle
+++ b/client/rest-high-level/build.gradle
@@ -18,19 +18,8 @@
  */
 
 import org.elasticsearch.gradle.precommit.PrecommitTasks
-import org.gradle.api.XmlProvider
-import org.gradle.api.publish.maven.MavenPublication
-
-buildscript {
-    repositories {
-        maven {
-            url 'https://plugins.gradle.org/m2/'
-        }
-    }
-    dependencies {
-        classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
-    }
-}
+import org.elasticsearch.gradle.test.RestIntegTestTask
+import org.gradle.api.internal.provider.Providers
 
 apply plugin: 'elasticsearch.build'
 apply plugin: 'elasticsearch.rest-test'
@@ -41,48 +30,9 @@ apply plugin: 'com.github.johnrengelman.shadow'
 group = 'org.elasticsearch.client'
 archivesBaseName = 'elasticsearch-rest-high-level-client'
 
-publishing {
-    publications {
-        nebula(MavenPublication) {
-            artifact shadowJar
-            artifactId = archivesBaseName
-            /*
-             * Configure the pom to include the "shadow" as compile dependencies
-             * because that is how we're using them but remove all other dependencies
-             * because they've been shaded into the jar.
-             */
-            pom.withXml { XmlProvider xml ->
-                Node root = xml.asNode()
-                root.remove(root.dependencies)
-                Node dependenciesNode = root.appendNode('dependencies')
-                project.configurations.shadow.allDependencies.each {
-                    if (false == it instanceof SelfResolvingDependency) {
-                        Node dependencyNode = dependenciesNode.appendNode('dependency')
-                        dependencyNode.appendNode('groupId', it.group)
-                        dependencyNode.appendNode('artifactId', it.name)
-                        dependencyNode.appendNode('version', it.version)
-                        dependencyNode.appendNode('scope', 'compile')
-                    }
-                }
-            }
-        }
-    }
-}
-
-/*
- * We need somewhere to configure dependencies that we don't wish to shade
- * into the high level REST client. The shadow plugin creates a "shadow"
- * configuration which is *almost* exactly that. It is never bundled into
- * the shaded jar but is used for main source compilation. Unfortunately,
- * by default it is not used for *test* source compilation and isn't used
- * in tests at all. This change makes it available for test compilation.
- * A change below makes it available for testing.
- */
-sourceSets {
-    test {
-        compileClasspath += configurations.shadow
-    }
-}
+// we need to copy the yaml spec so we can check naming (see RestHighLevelClientTests#testApiNamingConventions)
+Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE)
+test.dependsOn(copyRestSpec)
 
 dependencies {
     /*
@@ -102,6 +52,8 @@ dependencies {
     testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
     testCompile "junit:junit:${versions.junit}"
     testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
+    // this is needed to make RestHighLevelClientTests#testApiNamingConventions work from IDEs
+    testCompile "org.elasticsearch:rest-api-spec:${version}"
 }
 
 dependencyLicenses {
@@ -119,47 +71,6 @@ forbiddenApisMain {
     signaturesURLs += [file('src/main/resources/forbidden/rest-high-level-signatures.txt').toURI().toURL()]
 }
 
-shadowJar {
-    classifier = null
-    mergeServiceFiles()
-}
-
-// We don't need normal jar, we use shadow jar instead
-jar.enabled = false
-assemble.dependsOn shadowJar
-
-javadoc {
-    /*
-     * Bundle all of the javadoc from all of the shaded projects into this one
-     * so we don't *have* to publish javadoc for all of the "client" jars.
-     */
-    configurations.compile.dependencies.all { Dependency dep ->
-        Project p = dependencyToProject(dep)
-        if (p != null) {
-            evaluationDependsOn(p.path)
-            source += p.sourceSets.main.allJava
-        }
-    }
-}
-
-/*
- * Use the jar for testing so we have tests of the bundled jar.
- * Use the "shadow" configuration for testing because we need things
- * in it.
- */
-test {
-    classpath -= compileJava.outputs.files
-    classpath -= configurations.compile
-    classpath -= configurations.runtime
-    classpath += configurations.shadow
-    classpath += shadowJar.outputs.files
-    dependsOn shadowJar
-}
-integTestRunner {
-    classpath -= compileJava.outputs.files
-    classpath -= configurations.compile
-    classpath -= configurations.runtime
-    classpath += configurations.shadow
-    classpath += shadowJar.outputs.files
-    dependsOn shadowJar
+integTestCluster {
+    setting 'xpack.license.self_generated.type', 'trial'
 }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
index 2944b49bf18b0..250bbd520dad7 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
@@ -174,7 +174,7 @@ public void putMappingAsync(PutMappingRequest putMappingRequest, RequestOptions
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
-    public GetMappingsResponse getMappings(GetMappingsRequest getMappingsRequest, RequestOptions options) throws IOException {
+    public GetMappingsResponse getMapping(GetMappingsRequest getMappingsRequest, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(getMappingsRequest, RequestConverters::getMappings, options,
            GetMappingsResponse::fromXContent, emptySet());
    }
@@ -187,8 +187,8 @@ public GetMappingsResponse getMappings(GetMappingsRequest getMappingsRequest, Re
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
-    public void getMappingsAsync(GetMappingsRequest getMappingsRequest, RequestOptions options,
-                                 ActionListener<GetMappingsResponse> listener) {
+    public void getMappingAsync(GetMappingsRequest getMappingsRequest, RequestOptions options,
+                                ActionListener<GetMappingsResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(getMappingsRequest, RequestConverters::getMappings, options,
            GetMappingsResponse::fromXContent, listener, emptySet());
    }
@@ -474,8 +474,23 @@ public void getAsync(GetIndexRequest getIndexRequest, RequestOptions options,
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
+    * @deprecated use {@link #forcemerge(ForceMergeRequest, RequestOptions)} instead
     */
+    @Deprecated
    public ForceMergeResponse forceMerge(ForceMergeRequest forceMergeRequest, RequestOptions options) throws IOException {
+        return forcemerge(forceMergeRequest, options);
+    }
+
+    /**
+     * Force merge one or more indices using the Force Merge API.
+     * See
+     * Force Merge API on elastic.co
+     * @param forceMergeRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public ForceMergeResponse forcemerge(ForceMergeRequest forceMergeRequest, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(forceMergeRequest, RequestConverters::forceMerge, options,
            ForceMergeResponse::fromXContent, emptySet());
    }
@@ -487,8 +502,22 @@ public ForceMergeResponse forceMerge(ForceMergeRequest forceMergeRequest, Reques
     * @param forceMergeRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
+    * @deprecated use {@link #forcemergeAsync(ForceMergeRequest, RequestOptions, ActionListener)} instead
     */
+    @Deprecated
    public void forceMergeAsync(ForceMergeRequest forceMergeRequest, RequestOptions options, ActionListener<ForceMergeResponse> listener) {
+        forcemergeAsync(forceMergeRequest, options, listener);
+    }
+
+    /**
+     * Asynchronously force merge one or more indices using the Force Merge API.
+     * See
+     * Force Merge API on elastic.co
+     * @param forceMergeRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void forcemergeAsync(ForceMergeRequest forceMergeRequest, RequestOptions options, ActionListener<ForceMergeResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(forceMergeRequest, RequestConverters::forceMerge, options,
            ForceMergeResponse::fromXContent, listener, emptySet());
    }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java
index 340e14653971b..e889ec5beba80 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java
@@ -139,7 +139,7 @@ public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions op
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
-    public SimulatePipelineResponse simulatePipeline(SimulatePipelineRequest request, RequestOptions options) throws IOException {
+    public SimulatePipelineResponse simulate(SimulatePipelineRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::simulatePipeline, options,
            SimulatePipelineResponse::fromXContent, emptySet());
    }
@@ -154,9 +154,9 @@ public SimulatePipelineResponse simulatePipeline(SimulatePipelineRequest request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
-    public void simulatePipelineAsync(SimulatePipelineRequest request,
-                                      RequestOptions options,
-                                      ActionListener<SimulatePipelineResponse> listener) {
+    public void simulateAsync(SimulatePipelineRequest request,
+                              RequestOptions options,
+                              ActionListener<SimulatePipelineResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::simulatePipeline, options,
            SimulatePipelineResponse::fromXContent, listener, emptySet());
    }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index 9dbd4916c774b..a6122b0681e91 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -106,6 +106,7 @@
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.rankeval.RankEvalRequest;
 import org.elasticsearch.protocol.xpack.XPackInfoRequest;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.protocol.xpack.XPackUsageRequest;
 import org.elasticsearch.rest.action.search.RestSearchAction;
 import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
@@ -1097,6 +1098,25 @@ static Request xPackInfo(XPackInfoRequest infoRequest) {
        return request;
    }
 
+    static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) {
+        String endpoint = new EndpointBuilder()
+            .addPathPartAsIs("_xpack")
+            .addPathPartAsIs("watcher")
+            .addPathPartAsIs("watch")
+            .addPathPart(putWatchRequest.getId())
+            .build();
+
+        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
+        Params params = new Params(request).withVersion(putWatchRequest.getVersion());
+        if (putWatchRequest.isActive() == false) {
+            params.putParam("active", "false");
+        }
+        ContentType contentType = createContentType(putWatchRequest.xContentType());
+        BytesReference source = putWatchRequest.getSource();
+        request.setEntity(new ByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType));
+        return request;
+    }
+
    static Request xpackUsage(XPackUsageRequest usageRequest) {
        Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage");
        Params parameters = new Params(request);
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index b9e41b879328f..c71bebf6903ca 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -384,8 +384,23 @@ public final void getAsync(GetRequest getRequest, RequestOptions options, Action
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
+    * @deprecated use {@link #mget(MultiGetRequest, RequestOptions)} instead
     */
+    @Deprecated
    public final MultiGetResponse multiGet(MultiGetRequest multiGetRequest, RequestOptions options) throws IOException {
+        return mget(multiGetRequest, options);
+    }
+
+
+    /**
+     * Retrieves multiple documents by id using the Multi Get API.
+     * See Multi Get API on elastic.co
+     * @param multiGetRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public final MultiGetResponse mget(MultiGetRequest multiGetRequest, RequestOptions options) throws IOException {
        return performRequestAndParseEntity(multiGetRequest, RequestConverters::multiGet, options, MultiGetResponse::fromXContent,
            singleton(404));
    }
@@ -396,8 +411,21 @@ public final MultiGetResponse multiGet(MultiGetRequest multiGetRequest, RequestO
     * @param multiGetRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
+    * @deprecated use {@link #mgetAsync(MultiGetRequest, RequestOptions, ActionListener)} instead
     */
+    @Deprecated
    public final void multiGetAsync(MultiGetRequest multiGetRequest, RequestOptions options, ActionListener<MultiGetResponse> listener) {
+        mgetAsync(multiGetRequest, options, listener);
+    }
+
+    /**
+     * Asynchronously retrieves multiple documents by id using the Multi Get API.
+     * See Multi Get API on elastic.co
+     * @param multiGetRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public final void mgetAsync(MultiGetRequest multiGetRequest, RequestOptions options, ActionListener<MultiGetResponse> listener) {
        performRequestAsyncAndParseEntity(multiGetRequest, RequestConverters::multiGet, options, MultiGetResponse::fromXContent, listener,
            singleton(404));
    }
@@ -531,8 +559,23 @@ public final void searchAsync(SearchRequest searchRequest, RequestOptions option
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
+    * @deprecated use {@link #msearch(MultiSearchRequest, RequestOptions)} instead
     */
+    @Deprecated
    public final MultiSearchResponse multiSearch(MultiSearchRequest multiSearchRequest, RequestOptions options) throws IOException {
+        return msearch(multiSearchRequest, options);
+    }
+
+    /**
+     * Executes a multi search using the msearch API.
+     * See Multi search API on
+     * elastic.co
+     * @param multiSearchRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public final MultiSearchResponse msearch(MultiSearchRequest multiSearchRequest, RequestOptions options) throws IOException {
        return performRequestAndParseEntity(multiSearchRequest, RequestConverters::multiSearch, options, MultiSearchResponse::fromXContext,
            emptySet());
    }
@@ -544,9 +587,24 @@ public final MultiSearchResponse multiSearch(MultiSearchRequest multiSearchReque
     * @param searchRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
+    * @deprecated use {@link #msearchAsync(MultiSearchRequest, RequestOptions, ActionListener)} instead
     */
+    @Deprecated
    public final void multiSearchAsync(MultiSearchRequest searchRequest, RequestOptions options,
-                                       ActionListener<MultiSearchResponse> listener) {
+                                       ActionListener<MultiSearchResponse> listener) {
+        msearchAsync(searchRequest, options, listener);
+    }
+
+    /**
+     * Asynchronously executes a multi search using the msearch API.
+     * See Multi search API on
+     * elastic.co
+     * @param searchRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public final void msearchAsync(MultiSearchRequest searchRequest, RequestOptions options,
+                                   ActionListener<MultiSearchResponse> listener) {
        performRequestAsyncAndParseEntity(searchRequest, RequestConverters::multiSearch, options, MultiSearchResponse::fromXContext,
            listener, emptySet());
    }
@@ -559,8 +617,23 @@ public final void multiSearchAsync(MultiSearchRequest searchRequest, RequestOpti
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
+    * @deprecated use {@link #scroll(SearchScrollRequest, RequestOptions)} instead
     */
+    @Deprecated
    public final SearchResponse searchScroll(SearchScrollRequest searchScrollRequest, RequestOptions options) throws IOException {
+        return scroll(searchScrollRequest, options);
+    }
+
+    /**
+     * Executes a search using the Search Scroll API.
+     * See Search Scroll
+     * API on elastic.co
+     * @param searchScrollRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public final SearchResponse scroll(SearchScrollRequest searchScrollRequest, RequestOptions options) throws IOException {
        return performRequestAndParseEntity(searchScrollRequest, RequestConverters::searchScroll, options, SearchResponse::fromXContent,
            emptySet());
    }
@@ -572,9 +645,24 @@ public final SearchResponse searchScroll(SearchScrollRequest searchScrollRequest
     * @param searchScrollRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
+    * @deprecated use {@link #scrollAsync(SearchScrollRequest, RequestOptions, ActionListener)} instead
     */
+    @Deprecated
    public final void searchScrollAsync(SearchScrollRequest searchScrollRequest, RequestOptions options,
-                                        ActionListener<SearchResponse> listener) {
+                                        ActionListener<SearchResponse> listener) {
+        scrollAsync(searchScrollRequest, options, listener);
+    }
+
+    /**
+     * Asynchronously executes a search using the Search Scroll API.
+     * See Search Scroll
+     * API on elastic.co
+     * @param searchScrollRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public final void scrollAsync(SearchScrollRequest searchScrollRequest, RequestOptions options,
+                                  ActionListener<SearchResponse> listener) {
        performRequestAsyncAndParseEntity(searchScrollRequest, RequestConverters::searchScroll, options, SearchResponse::fromXContent,
            listener, emptySet());
    }
@@ -691,8 +779,8 @@ public final RankEvalResponse rankEval(RankEvalRequest rankEvalRequest, RequestO
     * See Multi Search Template API
     * on elastic.co.
     */
-    public final MultiSearchTemplateResponse multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest,
-                                                                 RequestOptions options) throws IOException {
+    public final MultiSearchTemplateResponse msearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest,
+                                                             RequestOptions options) throws IOException {
        return performRequestAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate,
            options, MultiSearchTemplateResponse::fromXContext, emptySet());
    }
@@ -703,9 +791,9 @@
     * See Multi Search Template API
     * on elastic.co.
     */
-    public final void multiSearchTemplateAsync(MultiSearchTemplateRequest multiSearchTemplateRequest,
-                                               RequestOptions options,
-                                               ActionListener<MultiSearchTemplateResponse> listener) {
+    public final void msearchTemplateAsync(MultiSearchTemplateRequest multiSearchTemplateRequest,
+                                           RequestOptions options,
+                                           ActionListener<MultiSearchTemplateResponse> listener) {
        performRequestAsyncAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate,
            options, MultiSearchTemplateResponse::fromXContext, listener, emptySet());
    }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
index f75f6cdef2405..ae115839baeaf 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
@@ -63,7 +63,7 @@ public final class SnapshotClient {
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
-    public GetRepositoriesResponse getRepositories(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options)
+    public GetRepositoriesResponse getRepository(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options)
            throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options,
            GetRepositoriesResponse::fromXContent, emptySet());
@@ -78,8 +78,8 @@ public GetRepositoriesResponse getRepositories(GetRepositoriesRequest getReposit
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
-    public void getRepositoriesAsync(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options,
-                                     ActionListener<GetRepositoriesResponse> listener) {
+    public void getRepositoryAsync(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options,
+                                   ActionListener<GetRepositoriesResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options,
            GetRepositoriesResponse::fromXContent, listener, emptySet());
    }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java
new file mode 100644
index 0000000000000..73c92ba5c45d5
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
+
+import java.io.IOException;
+
+import static java.util.Collections.emptySet;
+
+public final class WatcherClient {
+
+    private final RestHighLevelClient restHighLevelClient;
+
+    WatcherClient(RestHighLevelClient restHighLevelClient) {
+        this.restHighLevelClient = restHighLevelClient;
+    }
+
+    /**
+     * Put a watch into the cluster
+     * See
+     * the docs for more.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public PutWatchResponse putWatch(PutWatchRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options,
+            PutWatchResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Asynchronously put a watch into the cluster
+     * See
+     * the docs for more.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void putWatchAsync(PutWatchRequest request, RequestOptions options,
+                              ActionListener<PutWatchResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options,
+            PutWatchResponse::fromXContent, listener, emptySet());
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java
index a497619b987bd..4acaadfdb85d5 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java
@@ -39,10 +39,17 @@
  * X-Pack APIs on elastic.co for more information.
*/ public final class XPackClient { + private final RestHighLevelClient restHighLevelClient; + private final WatcherClient watcherClient; XPackClient(RestHighLevelClient restHighLevelClient) { this.restHighLevelClient = restHighLevelClient; + this.watcherClient = new WatcherClient(restHighLevelClient); + } + + public WatcherClient watcher() { + return watcherClient; } /** diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java index 7605b1c715c74..fdd5634ddd6bd 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java @@ -79,7 +79,7 @@ public void testThatBulkProcessorCountIsCorrect() throws Exception { assertThat(listener.afterCounts.get(), equalTo(1)); assertThat(listener.bulkFailures.size(), equalTo(0)); assertResponseItems(listener.bulkItems, numDocs); - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } } @@ -105,7 +105,7 @@ public void testBulkProcessorFlush() throws Exception { assertThat(listener.afterCounts.get(), equalTo(1)); assertThat(listener.bulkFailures.size(), equalTo(0)); assertResponseItems(listener.bulkItems, numDocs); - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } } @@ -157,7 +157,7 @@ public void testBulkProcessorConcurrentRequests() throws Exception { assertThat(ids.add(bulkItemResponse.getId()), equalTo(true)); } - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } public void testBulkProcessorWaitOnClose() throws Exception { @@ -188,7 +188,7 @@ public void testBulkProcessorWaitOnClose() throws Exception { } assertThat(listener.bulkFailures.size(), equalTo(0)); assertResponseItems(listener.bulkItems, numDocs); - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception { @@ -265,7 +265,7 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception } } - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), testDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), testDocs); } private static MultiGetRequest indexDocs(BulkProcessor processor, int numDocs) throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java index c20998eeb5826..5fd9fcb661c2b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java @@ -129,7 +129,7 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) } highLevelClient().indices().refresh(new RefreshRequest(), 
RequestOptions.DEFAULT); - int multiGetResponsesCount = highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT).getResponses().length; + int multiGetResponsesCount = highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT).getResponses().length; if (rejectedExecutionExpected) { assertThat(multiGetResponsesCount, lessThanOrEqualTo(numberOfAsyncOps)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index 9de4c22611c3b..89f357477fa06 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -253,7 +253,7 @@ public void testMultiGet() throws IOException { MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.add("index", "type", "id1"); multiGetRequest.add("index", "type", "id2"); - MultiGetResponse response = execute(multiGetRequest, highLevelClient()::multiGet, highLevelClient()::multiGetAsync); + MultiGetResponse response = execute(multiGetRequest, highLevelClient()::mget, highLevelClient()::mgetAsync); assertEquals(2, response.getResponses().length); assertTrue(response.getResponses()[0].isFailed()); @@ -285,7 +285,7 @@ public void testMultiGet() throws IOException { MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.add("index", "type", "id1"); multiGetRequest.add("index", "type", "id2"); - MultiGetResponse response = execute(multiGetRequest, highLevelClient()::multiGet, highLevelClient()::multiGetAsync); + MultiGetResponse response = execute(multiGetRequest, highLevelClient()::mget, highLevelClient()::mgetAsync); assertEquals(2, response.getResponses().length); assertFalse(response.getResponses()[0].isFailed()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java index 3d1db23da16b6..ff27fe21c27e6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java @@ -121,7 +121,7 @@ private static RequestOptions optionsForNodeName(String nodeName) { * so that they can be used by subclasses to implement custom logic. 
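* For example, a subclass along the following lines compiles against those protected members (an illustrative sketch; the class name is made up, the constructor is the public RestClientBuilder one):
* <pre>{@code
* public class CustomRestClient extends RestHighLevelClient {
*     CustomRestClient(RestClientBuilder restClientBuilder) {
*         super(restClientBuilder);
*     }
*     // protected helpers such as performRequest and parseEntity are in
*     // scope here and can back custom endpoint methods
* }
* }</pre>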
*/ @SuppressForbidden(reason = "We're forced to use Class#getDeclaredMethods() here because this test checks protected methods") - public void testMethodsVisibility() throws ClassNotFoundException { + public void testMethodsVisibility() { final String[] methodNames = new String[]{"parseEntity", "parseResponseException", "performRequest", diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 88cf445d436fe..36a45999b51ee 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -443,7 +443,7 @@ public void testGetMapping() throws IOException { .types("_doc"); GetMappingsResponse getMappingsResponse = - execute(request, highLevelClient().indices()::getMappings, highLevelClient().indices()::getMappingsAsync); + execute(request, highLevelClient().indices()::getMapping, highLevelClient().indices()::getMappingAsync); Map<String, Object> mappings = getMappingsResponse.getMappings().get(indexName).get("_doc").sourceAsMap(); Map<String, Object> type = new HashMap<>(); @@ -796,7 +796,7 @@ public void testForceMerge() throws IOException { createIndex(index, settings); ForceMergeRequest forceMergeRequest = new ForceMergeRequest(index); ForceMergeResponse forceMergeResponse = - execute(forceMergeRequest, highLevelClient().indices()::forceMerge, highLevelClient().indices()::forceMergeAsync); + execute(forceMergeRequest, highLevelClient().indices()::forcemerge, highLevelClient().indices()::forcemergeAsync); assertThat(forceMergeResponse.getTotalShards(), equalTo(1)); assertThat(forceMergeResponse.getSuccessfulShards(), equalTo(1)); assertThat(forceMergeResponse.getFailedShards(), equalTo(0)); @@ -807,7 +807,7 @@ public void testForceMerge() throws IOException { assertFalse(indexExists(nonExistentIndex)); ForceMergeRequest forceMergeRequest = new ForceMergeRequest(nonExistentIndex); ElasticsearchException exception = expectThrows(ElasticsearchException.class, - () -> execute(forceMergeRequest, highLevelClient().indices()::forceMerge, highLevelClient().indices()::forceMergeAsync)); + () -> execute(forceMergeRequest, highLevelClient().indices()::forcemerge, highLevelClient().indices()::forcemergeAsync)); assertEquals(RestStatus.NOT_FOUND, exception.status()); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java index 6fd6f95059577..1f5914f392cf4 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java @@ -135,7 +135,7 @@ private void testSimulatePipeline(boolean isVerbose, ); request.setVerbose(isVerbose); SimulatePipelineResponse response = - execute(request, highLevelClient().ingest()::simulatePipeline, highLevelClient().ingest()::simulatePipelineAsync); + execute(request, highLevelClient().ingest()::simulate, highLevelClient().ingest()::simulateAsync); List results = response.getResults(); assertEquals(1, results.size()); if (isVerbose) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java index b45f52f9e441c..5f38316fd752d 100644 ---
a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java @@ -66,13 +66,13 @@ public void testXPackInfo() throws IOException { assertEquals(mainResponse.getBuild().shortHash(), info.getBuildInfo().getHash()); - assertEquals("basic", info.getLicenseInfo().getType()); - assertEquals("basic", info.getLicenseInfo().getMode()); + assertEquals("trial", info.getLicenseInfo().getType()); + assertEquals("trial", info.getLicenseInfo().getMode()); assertEquals(LicenseStatus.ACTIVE, info.getLicenseInfo().getStatus()); FeatureSet graph = info.getFeatureSetsInfo().getFeatureSets().get("graph"); assertNotNull(graph.description()); - assertFalse(graph.available()); + assertTrue(graph.available()); assertTrue(graph.enabled()); assertNull(graph.nativeCodeInfo()); FeatureSet monitoring = info.getFeatureSetsInfo().getFeatureSets().get("monitoring"); @@ -82,7 +82,7 @@ public void testXPackInfo() throws IOException { assertNull(monitoring.nativeCodeInfo()); FeatureSet ml = info.getFeatureSetsInfo().getFeatureSets().get("ml"); assertNotNull(ml.description()); - assertFalse(ml.available()); + assertTrue(ml.available()); assertTrue(ml.enabled()); assertEquals(mainResponse.getVersion().toString(), ml.nativeCodeInfo().get("version").toString().replace("-SNAPSHOT", "")); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index fb4e3b22712f5..c1f47feb33d5a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -41,9 +41,9 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; @@ -125,6 +125,7 @@ import org.elasticsearch.index.rankeval.RatedRequest; import org.elasticsearch.index.rankeval.RestRankEvalAction; import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.ScriptType; @@ -145,6 +146,7 @@ import org.elasticsearch.test.RandomObjects; import org.hamcrest.CoreMatchers; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -2523,6 +2525,35 @@ public void testXPackInfo() { assertEquals(expectedParams, request.getParameters()); } + public void testXPackPutWatch() throws Exception { + PutWatchRequest putWatchRequest = new PutWatchRequest(); + String watchId = 
randomAlphaOfLength(10); + putWatchRequest.setId(watchId); + String body = randomAlphaOfLength(20); + putWatchRequest.setSource(new BytesArray(body), XContentType.JSON); + + Map expectedParams = new HashMap<>(); + if (randomBoolean()) { + putWatchRequest.setActive(false); + expectedParams.put("active", "false"); + } + + if (randomBoolean()) { + long version = randomLongBetween(10, 100); + putWatchRequest.setVersion(version); + expectedParams.put("version", String.valueOf(version)); + } + + Request request = RequestConverters.xPackWatcherPutWatch(putWatchRequest); + assertEquals(HttpPut.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters())); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + request.getEntity().writeTo(bos); + assertThat(bos.toString("UTF-8"), is(body)); + } + /** * Randomize the {@link FetchSourceContext} request parameters. */ diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 2925062e0e75b..5acc6f5552f55 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -20,8 +20,6 @@ package org.elasticsearch.client; import com.fasterxml.jackson.core.JsonParseException; - -import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; @@ -53,6 +51,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -73,20 +72,30 @@ import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import org.junit.Before; import java.io.IOException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.net.SocketTimeoutException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import java.util.stream.Stream; -import static org.elasticsearch.client.RestClientTestUtil.randomHeaders; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.hamcrest.CoreMatchers.endsWith; +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; @@ -137,7 +146,6 @@ public void testPingSocketTimeout() throws IOException { } public void testInfo() throws IOException { - Header[] headers = 
randomHeaders(random(), "Header"); MainResponse testInfo = new MainResponse("nodeName", Version.CURRENT, new ClusterName("clusterName"), "clusterUuid", Build.CURRENT); mockResponse(testInfo); @@ -150,7 +158,7 @@ public void testSearchScroll() throws IOException { null, false, false, null, 1), randomAlphaOfLengthBetween(5, 10), 5, 5, 0, 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); mockResponse(mockSearchResponse); - SearchResponse searchResponse = restHighLevelClient.searchScroll( + SearchResponse searchResponse = restHighLevelClient.scroll( new SearchScrollRequest(randomAlphaOfLengthBetween(5, 10)), RequestOptions.DEFAULT); assertEquals(mockSearchResponse.getScrollId(), searchResponse.getScrollId()); assertEquals(0, searchResponse.getHits().totalHits); @@ -632,6 +640,151 @@ public void testProvidedNamedXContents() { assertTrue(names.contains(DiscountedCumulativeGain.NAME)); } + public void testApiNamingConventions() throws Exception { + //this list should be empty once the high-level client is feature complete + String[] notYetSupportedApi = new String[]{ + "cluster.remote_info", + "count", + "create", + "delete_by_query", + "exists_source", + "get_source", + "indices.delete_alias", + "indices.delete_template", + "indices.exists_template", + "indices.exists_type", + "indices.get_upgrade", + "indices.put_alias", + "mtermvectors", + "put_script", + "reindex", + "reindex_rethrottle", + "render_search_template", + "scripts_painless_execute", + "snapshot.restore", + "tasks.get", + "termvectors", + "update_by_query" + }; + //These APIs are not required for high-level client feature completeness + String[] notRequiredApi = new String[] { + "cluster.allocation_explain", + "cluster.pending_tasks", + "cluster.reroute", + "cluster.state", + "cluster.stats", + "indices.shard_stores", + "indices.upgrade", + "indices.recovery", + "indices.segments", + "indices.stats", + "ingest.processor_grok", + "nodes.info", + "nodes.stats", + "nodes.hot_threads", + "nodes.usage", + "search_shards", + }; + Set<String> deprecatedMethods = new HashSet<>(); + deprecatedMethods.add("indices.force_merge"); + deprecatedMethods.add("multi_get"); + deprecatedMethods.add("multi_search"); + deprecatedMethods.add("search_scroll"); + + ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load("/rest-api-spec/api"); + Set<String> apiSpec = restSpec.getApis().stream().map(ClientYamlSuiteRestApi::getName).collect(Collectors.toSet()); + + Set<String> topLevelMethodsExclusions = new HashSet<>(); + topLevelMethodsExclusions.add("getLowLevelClient"); + topLevelMethodsExclusions.add("close");
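// An illustrative view of what the methods map assembled next contains,
// following the namespace + toSnakeCase naming rule (hypothetical entries):
//   "mget"                    -> RestHighLevelClient#mget
//   "indices.get_mapping"     -> IndicesClient#getMapping
//   "xpack.watcher.put_watch" -> WatcherClient#putWatch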
+ + Map<String, Method> methods = Arrays.stream(RestHighLevelClient.class.getMethods()) + .filter(method -> method.getDeclaringClass().equals(RestHighLevelClient.class) + && topLevelMethodsExclusions.contains(method.getName()) == false) + .map(method -> Tuple.tuple(toSnakeCase(method.getName()), method)) + .flatMap(tuple -> tuple.v2().getReturnType().getName().endsWith("Client") + ? getSubClientMethods(tuple.v1(), tuple.v2().getReturnType()) : Stream.of(tuple)) + .collect(Collectors.toMap(Tuple::v1, Tuple::v2)); + + Set<String> apiNotFound = new HashSet<>(); + + for (Map.Entry<String, Method> entry : methods.entrySet()) { + Method method = entry.getValue(); + String apiName = entry.getKey(); + + assertTrue("method [" + apiName + "] is not final", + Modifier.isFinal(method.getClass().getModifiers()) || Modifier.isFinal(method.getModifiers())); + assertTrue(Modifier.isPublic(method.getModifiers())); + + //we convert all the method names to snake case, hence we need to look for the '_async' suffix rather than 'Async' + if (apiName.endsWith("_async")) { + assertTrue("async method [" + method.getName() + "] doesn't have a corresponding sync method", + methods.containsKey(apiName.substring(0, apiName.length() - 6))); + assertThat(method.getReturnType(), equalTo(Void.TYPE)); + assertEquals(0, method.getExceptionTypes().length); + assertEquals(3, method.getParameterTypes().length); + assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request")); + assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName())); + assertThat(method.getParameterTypes()[2].getName(), equalTo(ActionListener.class.getName())); + } else { + //A few methods return a boolean rather than a response object + if (apiName.equals("ping") || apiName.contains("exist")) { + assertThat(method.getReturnType().getSimpleName(), equalTo("boolean")); + } else { + assertThat(method.getReturnType().getSimpleName(), endsWith("Response")); + } + + assertEquals(1, method.getExceptionTypes().length); + //a few methods don't accept a request object as an argument + if (apiName.equals("ping") || apiName.equals("info")) { + assertEquals(1, method.getParameterTypes().length); + assertThat(method.getParameterTypes()[0].getName(), equalTo(RequestOptions.class.getName())); + } else { + assertEquals(apiName, 2, method.getParameterTypes().length); + assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request")); + assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName())); + } + + boolean remove = apiSpec.remove(apiName); + if (remove == false && deprecatedMethods.contains(apiName) == false) { + //TODO xpack APIs are currently ignored, we need to load the xpack yaml spec too + if (apiName.startsWith("xpack.") == false) { + apiNotFound.add(apiName); + } + } + } + } + assertThat("Some client methods don't match any API defined in the REST spec: " + apiNotFound, + apiNotFound.size(), equalTo(0)); + + //we decided not to support cat APIs in the high-level REST client; they are supposed to be used from the low-level client + apiSpec.removeIf(api -> api.startsWith("cat.")); + Stream.concat(Arrays.stream(notYetSupportedApi), Arrays.stream(notRequiredApi)).forEach( + api -> assertTrue(api + " API is either not defined in the spec or already supported by the high-level client", + apiSpec.remove(api))); + assertThat("Some APIs are not supported but they should be: " + apiSpec, apiSpec.size(), equalTo(0)); + } + + private static Stream<Tuple<String, Method>> getSubClientMethods(String namespace, Class<?> clientClass) { + return Arrays.stream(clientClass.getMethods()).filter(method -> method.getDeclaringClass().equals(clientClass)) + .map(method -> Tuple.tuple(namespace + "." + toSnakeCase(method.getName()), method)) + .flatMap(tuple -> tuple.v2().getReturnType().getName().endsWith("Client") + ?
getSubClientMethods(tuple.v1(), tuple.v2().getReturnType()) : Stream.of(tuple)); + } + + private static String toSnakeCase(String camelCase) { + StringBuilder snakeCaseString = new StringBuilder(); + for (Character aChar : camelCase.toCharArray()) { + if (Character.isUpperCase(aChar)) { + snakeCaseString.append('_'); + snakeCaseString.append(Character.toLowerCase(aChar)); + } else { + snakeCaseString.append(aChar); + } + } + return snakeCaseString.toString(); + } + private static class TrackingActionListener implements ActionListener { private final AtomicInteger statusCode = new AtomicInteger(-1); private final AtomicReference exception = new AtomicReference<>(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index ce9091a91ff8b..9c9c5425f0006 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -597,7 +597,7 @@ public void testSearchScroll() throws Exception { } searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)), - highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync); + highLevelClient()::scroll, highLevelClient()::scrollAsync); assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); @@ -606,7 +606,7 @@ public void testSearchScroll() throws Exception { } searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)), - highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync); + highLevelClient()::scroll, highLevelClient()::scrollAsync); assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(30)); @@ -623,7 +623,7 @@ public void testSearchScroll() throws Exception { SearchScrollRequest scrollRequest = new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)); ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> execute(scrollRequest, - highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync)); + highLevelClient()::scroll, highLevelClient()::scrollAsync)); assertEquals(RestStatus.NOT_FOUND, exception.status()); assertThat(exception.getRootCause(), instanceOf(ElasticsearchException.class)); ElasticsearchException rootCause = (ElasticsearchException) exception.getRootCause(); @@ -644,7 +644,7 @@ public void testMultiSearch() throws Exception { multiSearchRequest.add(searchRequest3); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -686,7 +686,7 @@ public void testMultiSearch_withAgg() throws Exception { multiSearchRequest.add(searchRequest3); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); 
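// A quick worked check of the toSnakeCase mapping defined above, mirroring how
// testApiNamingConventions resolves the renamed methods (hypothetical asserts):
//   assertEquals("msearch", toSnakeCase("msearch"));
//   assertEquals("msearch_template_async", toSnakeCase("msearchTemplateAsync"));
// the "_async" suffix is then stripped, so msearch/msearchAsync and
// msearchTemplate/msearchTemplateAsync line up with the "msearch" and
// "msearch_template" entries of the REST spec.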
assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -734,7 +734,7 @@ public void testMultiSearch_withQuery() throws Exception { multiSearchRequest.add(searchRequest3); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -759,7 +759,7 @@ public void testMultiSearch_withQuery() throws Exception { searchRequest1.source().highlighter(new HighlightBuilder().field("field")); searchRequest2.source().highlighter(new HighlightBuilder().field("field")); searchRequest3.source().highlighter(new HighlightBuilder().field("field")); - multiSearchResponse = execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + multiSearchResponse = execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -797,7 +797,7 @@ public void testMultiSearch_failure() throws Exception { multiSearchRequest.add(searchRequest2); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(2)); @@ -941,8 +941,8 @@ public void testMultiSearchTemplate() throws Exception { multiSearchTemplateRequest.add(badRequest); MultiSearchTemplateResponse multiSearchTemplateResponse = - execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, - highLevelClient()::multiSearchTemplateAsync); + execute(multiSearchTemplateRequest, highLevelClient()::msearchTemplate, + highLevelClient()::msearchTemplateAsync); Item[] responses = multiSearchTemplateResponse.getResponses(); @@ -999,8 +999,8 @@ public void testMultiSearchTemplateAllBad() throws Exception { // The whole HTTP request should fail if no nested search requests are valid ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, - () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, - highLevelClient()::multiSearchTemplateAsync)); + () -> execute(multiSearchTemplateRequest, highLevelClient()::msearchTemplate, + highLevelClient()::msearchTemplateAsync)); assertEquals(RestStatus.BAD_REQUEST, exception.status()); assertThat(exception.getMessage(), containsString("no requests added")); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index 6d035f5db654a..5483f055c2c12 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -77,8 +77,8 @@ public void testSnapshotGetRepositoriesUsingParams() throws IOException { GetRepositoriesRequest request = new 
GetRepositoriesRequest(); request.repositories(new String[]{testRepository}); - GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepositories, - highLevelClient().snapshot()::getRepositoriesAsync); + GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepository, + highLevelClient().snapshot()::getRepositoryAsync); assertThat(1, equalTo(response.repositories().size())); } @@ -86,8 +86,8 @@ public void testSnapshotGetDefaultRepositories() throws IOException { assertTrue(createTestRepository("other", FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); assertTrue(createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); - GetRepositoriesResponse response = execute(new GetRepositoriesRequest(), highLevelClient().snapshot()::getRepositories, - highLevelClient().snapshot()::getRepositoriesAsync); + GetRepositoriesResponse response = execute(new GetRepositoriesRequest(), highLevelClient().snapshot()::getRepository, + highLevelClient().snapshot()::getRepositoryAsync); assertThat(2, equalTo(response.repositories().size())); } @@ -95,7 +95,7 @@ public void testSnapshotGetRepositoriesNonExistent() { String repository = "doesnotexist"; GetRepositoriesRequest request = new GetRepositoriesRequest(new String[]{repository}); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> execute(request, - highLevelClient().snapshot()::getRepositories, highLevelClient().snapshot()::getRepositoriesAsync)); + highLevelClient().snapshot()::getRepository, highLevelClient().snapshot()::getRepositoryAsync)); assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND)); assertThat(exception.getMessage(), equalTo( @@ -107,8 +107,8 @@ public void testSnapshotDeleteRepository() throws IOException { assertTrue(createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); GetRepositoriesRequest request = new GetRepositoriesRequest(); - GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepositories, - highLevelClient().snapshot()::getRepositoriesAsync); + GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepository, + highLevelClient().snapshot()::getRepositoryAsync); assertThat(1, equalTo(response.repositories().size())); DeleteRepositoryRequest deleteRequest = new DeleteRepositoryRequest(repository); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java new file mode 100644 index 0000000000000..dec438a47ab18 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; + +import static org.hamcrest.Matchers.is; + +public class WatcherIT extends ESRestHighLevelClientTestCase { + + public void testPutWatch() throws Exception { + String watchId = randomAlphaOfLength(10); + String json = "{ \n" + + " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" + + " \"input\": { \"none\": {} },\n" + + " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" + + "}"; + BytesReference bytesReference = new BytesArray(json); + PutWatchRequest putWatchRequest = new PutWatchRequest(watchId, bytesReference, XContentType.JSON); + PutWatchResponse putWatchResponse = highLevelClient().xpack().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT); + assertThat(putWatchResponse.isCreated(), is(true)); + assertThat(putWatchResponse.getId(), is(watchId)); + assertThat(putWatchResponse.getVersion(), is(1L)); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 9dad115643cbf..ad41c139ddc37 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -1121,7 +1121,7 @@ public void testMultiGet() throws Exception { // end::multi-get-request-top-level-extras // tag::multi-get-execute - MultiGetResponse response = client.multiGet(request, RequestOptions.DEFAULT); + MultiGetResponse response = client.mget(request, RequestOptions.DEFAULT); // end::multi-get-execute // tag::multi-get-response @@ -1174,7 +1174,7 @@ public void onFailure(Exception e) { listener = new LatchedActionListener<>(listener, latch); // tag::multi-get-execute-async - client.multiGetAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.mgetAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::multi-get-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1185,7 +1185,7 @@ public void onFailure(Exception e) { request.add(new MultiGetRequest.Item("index", "type", "example_id") .fetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)); // <1> // end::multi-get-request-no-source - MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request, RequestOptions.DEFAULT)); + MultiGetItemResponse item = unwrapAndAssertExample(client.mget(request, RequestOptions.DEFAULT)); assertNull(item.getResponse().getSource()); } { @@ -1198,7 +1198,7 @@ public void onFailure(Exception e) { request.add(new MultiGetRequest.Item("index", "type", "example_id") .fetchSourceContext(fetchSourceContext)); // <1> // end::multi-get-request-source-include - MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request, RequestOptions.DEFAULT)); + MultiGetItemResponse item = unwrapAndAssertExample(client.mget(request, RequestOptions.DEFAULT)); assertThat(item.getResponse().getSource(), hasEntry("foo", "val1")); assertThat(item.getResponse().getSource(), hasEntry("bar", 
"val2")); assertThat(item.getResponse().getSource(), not(hasKey("baz"))); @@ -1213,7 +1213,7 @@ public void onFailure(Exception e) { request.add(new MultiGetRequest.Item("index", "type", "example_id") .fetchSourceContext(fetchSourceContext)); // <1> // end::multi-get-request-source-exclude - MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request, RequestOptions.DEFAULT)); + MultiGetItemResponse item = unwrapAndAssertExample(client.mget(request, RequestOptions.DEFAULT)); assertThat(item.getResponse().getSource(), not(hasKey("foo"))); assertThat(item.getResponse().getSource(), not(hasKey("bar"))); assertThat(item.getResponse().getSource(), hasEntry("baz", "val3")); @@ -1223,7 +1223,7 @@ public void onFailure(Exception e) { // tag::multi-get-request-stored request.add(new MultiGetRequest.Item("index", "type", "example_id") .storedFields("foo")); // <1> - MultiGetResponse response = client.multiGet(request, RequestOptions.DEFAULT); + MultiGetResponse response = client.mget(request, RequestOptions.DEFAULT); MultiGetItemResponse item = response.getResponses()[0]; String value = item.getResponse().getField("foo").getValue(); // <2> // end::multi-get-request-stored @@ -1235,7 +1235,7 @@ public void onFailure(Exception e) { MultiGetRequest request = new MultiGetRequest(); request.add(new MultiGetRequest.Item("index", "type", "example_id") .version(1000L)); - MultiGetResponse response = client.multiGet(request, RequestOptions.DEFAULT); + MultiGetResponse response = client.mget(request, RequestOptions.DEFAULT); MultiGetItemResponse item = response.getResponses()[0]; assertNull(item.getResponse()); // <1> Exception e = item.getFailure().getFailure(); // <2> diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 23dab5b21e2ab..36d562c501f47 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -622,7 +622,7 @@ public void testGetMapping() throws IOException { // end::get-mapping-request-indicesOptions // tag::get-mapping-execute - GetMappingsResponse getMappingResponse = client.indices().getMappings(request, RequestOptions.DEFAULT); + GetMappingsResponse getMappingResponse = client.indices().getMapping(request, RequestOptions.DEFAULT); // end::get-mapping-execute // tag::get-mapping-response @@ -704,7 +704,7 @@ public void onFailure(Exception e) { }); // tag::get-mapping-execute-async - client.indices().getMappingsAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.indices().getMappingAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-mapping-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1344,7 +1344,7 @@ public void testForceMergeIndex() throws Exception { // end::force-merge-request-flush // tag::force-merge-execute - ForceMergeResponse forceMergeResponse = client.indices().forceMerge(request, RequestOptions.DEFAULT); + ForceMergeResponse forceMergeResponse = client.indices().forcemerge(request, RequestOptions.DEFAULT); // end::force-merge-execute // tag::force-merge-response @@ -1369,14 +1369,14 @@ public void onFailure(Exception e) { // end::force-merge-execute-listener // tag::force-merge-execute-async - client.indices().forceMergeAsync(request, 
RequestOptions.DEFAULT, listener); // <1> + client.indices().forcemergeAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::force-merge-execute-async } { // tag::force-merge-notfound try { ForceMergeRequest request = new ForceMergeRequest("does_not_exist"); - client.indices().forceMerge(request, RequestOptions.DEFAULT); + client.indices().forcemerge(request, RequestOptions.DEFAULT); } catch (ElasticsearchException exception) { if (exception.status() == RestStatus.NOT_FOUND) { // <1> diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java index c53ec2b5d7cc7..98502e3668af1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java @@ -317,7 +317,7 @@ public void testSimulatePipeline() throws IOException { // end::simulate-pipeline-request-verbose // tag::simulate-pipeline-execute - SimulatePipelineResponse response = client.ingest().simulatePipeline(request, RequestOptions.DEFAULT); // <1> + SimulatePipelineResponse response = client.ingest().simulate(request, RequestOptions.DEFAULT); // <1> // end::simulate-pipeline-execute // tag::simulate-pipeline-response @@ -381,7 +381,7 @@ public void onFailure(Exception e) { listener = new LatchedActionListener<>(listener, latch); // tag::simulate-pipeline-execute-async - client.ingest().simulatePipelineAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.ingest().simulateAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::simulate-pipeline-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java index a99b991620a25..a9fe4aba2f75a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java @@ -39,11 +39,13 @@ import org.elasticsearch.protocol.xpack.XPackUsageResponse; import java.io.IOException; +import java.time.Instant; import java.util.EnumSet; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; /** @@ -97,8 +99,7 @@ public void testXPackInfo() throws Exception { //tag::x-pack-info-response BuildInfo build = response.getBuildInfo(); // <1> LicenseInfo license = response.getLicenseInfo(); // <2> - assertEquals(XPackInfoResponse.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS, - license.getExpiryDate()); // <3> + assertThat(license.getExpiryDate(), is(greaterThan(Instant.now().toEpochMilli()))); // <3> FeatureSetsInfo features = response.getFeatureSetsInfo(); // <4> //end::x-pack-info-response diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 26bb4682fd9db..c60f2d4c92b87 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -583,7 +583,7 @@ public void testScroll() throws Exception { // tag::search-scroll2 SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId); // <1> scrollRequest.scroll(TimeValue.timeValueSeconds(30)); - SearchResponse searchScrollResponse = client.searchScroll(scrollRequest, RequestOptions.DEFAULT); + SearchResponse searchScrollResponse = client.scroll(scrollRequest, RequestOptions.DEFAULT); scrollId = searchScrollResponse.getScrollId(); // <2> hits = searchScrollResponse.getHits(); // <3> assertEquals(3, hits.getTotalHits()); @@ -612,7 +612,7 @@ public void testScroll() throws Exception { // end::scroll-request-arguments // tag::search-scroll-execute-sync - SearchResponse searchResponse = client.searchScroll(scrollRequest, RequestOptions.DEFAULT); + SearchResponse searchResponse = client.scroll(scrollRequest, RequestOptions.DEFAULT); // end::search-scroll-execute-sync assertEquals(0, searchResponse.getFailedShards()); @@ -638,7 +638,7 @@ public void onFailure(Exception e) { scrollListener = new LatchedActionListener<>(scrollListener, latch); // tag::search-scroll-execute-async - client.searchScrollAsync(scrollRequest, RequestOptions.DEFAULT, scrollListener); // <1> + client.scrollAsync(scrollRequest, RequestOptions.DEFAULT, scrollListener); // <1> // end::search-scroll-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -710,7 +710,7 @@ public void onFailure(Exception e) { while (searchHits != null && searchHits.length > 0) { // <2> SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId); // <3> scrollRequest.scroll(scroll); - searchResponse = client.searchScroll(scrollRequest, RequestOptions.DEFAULT); + searchResponse = client.scroll(scrollRequest, RequestOptions.DEFAULT); scrollId = searchResponse.getScrollId(); searchHits = searchResponse.getHits().getHits(); // <4> @@ -861,7 +861,7 @@ public void testMultiSearchTemplateWithInlineScript() throws Exception { // end::multi-search-template-request-inline // tag::multi-search-template-request-sync - MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT); + MultiSearchTemplateResponse multiResponse = client.msearchTemplate(multiRequest, RequestOptions.DEFAULT); // end::multi-search-template-request-sync // tag::multi-search-template-response @@ -916,7 +916,7 @@ public void testMultiSearchTemplateWithStoredScript() throws Exception { // tag::multi-search-template-execute - MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT); + MultiSearchTemplateResponse multiResponse = client.msearchTemplate(multiRequest, RequestOptions.DEFAULT); // end::multi-search-template-execute assertNotNull(multiResponse); @@ -944,7 +944,7 @@ public void onFailure(Exception e) { listener = new LatchedActionListener<>(listener, latch); // tag::multi-search-template-execute-async - client.multiSearchTemplateAsync(multiRequest, RequestOptions.DEFAULT, listener); + client.msearchTemplateAsync(multiRequest, RequestOptions.DEFAULT, listener); // end::multi-search-template-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1201,7 +1201,7 @@ public void testMultiSearch() throws Exception { request.add(secondSearchRequest); // end::multi-search-request-basic // tag::multi-search-execute - 
MultiSearchResponse response = client.multiSearch(request, RequestOptions.DEFAULT); + MultiSearchResponse response = client.msearch(request, RequestOptions.DEFAULT); // end::multi-search-execute // tag::multi-search-response MultiSearchResponse.Item firstResponse = response.getResponses()[0]; // <1> @@ -1233,7 +1233,7 @@ public void onFailure(Exception e) { listener = new LatchedActionListener<>(listener, latch); // tag::multi-search-execute-async - client.multiSearchAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.msearchAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::multi-search-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1244,7 +1244,7 @@ public void onFailure(Exception e) { request.add(new SearchRequest("posts") // <1> .types("doc")); // <2> // end::multi-search-request-index - MultiSearchResponse response = client.multiSearch(request, RequestOptions.DEFAULT); + MultiSearchResponse response = client.msearch(request, RequestOptions.DEFAULT); MultiSearchResponse.Item firstResponse = response.getResponses()[0]; assertNull(firstResponse.getFailure()); SearchResponse searchResponse = firstResponse.getResponse(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index 68a8113af6d38..fff3e7ece7066 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -221,7 +221,7 @@ public void testSnapshotGetRepository() throws IOException { // end::get-repository-request-masterTimeout // tag::get-repository-execute - GetRepositoriesResponse response = client.snapshot().getRepositories(request, RequestOptions.DEFAULT); + GetRepositoriesResponse response = client.snapshot().getRepository(request, RequestOptions.DEFAULT); // end::get-repository-execute // tag::get-repository-response @@ -256,7 +256,7 @@ public void onFailure(Exception e) { listener = new LatchedActionListener<>(listener, latch); // tag::get-repository-execute-async - client.snapshot().getRepositoriesAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.snapshot().getRepositoryAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-repository-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java new file mode 100644 index 0000000000000..df51d896cda70 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.documentation; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase { + + public void testPutWatch() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + //tag::x-pack-put-watch-execute + // you can also use the WatchSourceBuilder from org.elasticsearch.plugin:x-pack-core to create a watch programmatically + BytesReference watch = new BytesArray("{ \n" + + " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" + + " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" + + " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" + + "}"); + PutWatchRequest request = new PutWatchRequest("my_watch_id", watch, XContentType.JSON); + request.setActive(false); // <1> + PutWatchResponse response = client.xpack().watcher().putWatch(request, RequestOptions.DEFAULT); + //end::x-pack-put-watch-execute + + //tag::x-pack-put-watch-response + String watchId = response.getId(); // <1> + boolean isCreated = response.isCreated(); // <2> + long version = response.getVersion(); // <3> + //end::x-pack-put-watch-response + } + + { + BytesReference watch = new BytesArray("{ \n" + + " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" + + " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" + + " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" + + "}"); + PutWatchRequest request = new PutWatchRequest("my_other_watch_id", watch, XContentType.JSON); + // tag::x-pack-put-watch-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(PutWatchResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-put-watch-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-put-watch-execute-async + client.xpack().watcher().putWatchAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-put-watch-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } +} diff --git a/client/rest/build.gradle b/client/rest/build.gradle index b1ed05a834213..fc2ab0bc4c05d 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -59,6 +59,10 @@ forbiddenApisMain { 
PrecommitTasks.getResource('/forbidden/http-signatures.txt')] } +forbiddenPatterns { + exclude '**/*.der' +} + forbiddenApisTest { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage bundledSignatures -= 'jdk-non-portable' diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java index 30359ea90f666..49eefc527baf1 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java @@ -30,14 +30,21 @@ import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.TrustManagerFactory; import java.io.IOException; import java.io.InputStream; import java.net.InetAddress; import java.net.InetSocketAddress; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.KeyFactory; import java.security.KeyStore; +import java.security.cert.Certificate; +import java.security.cert.CertificateFactory; +import java.security.spec.PKCS8EncodedKeySpec; -import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @@ -72,9 +79,6 @@ public static void stopHttpServers() throws IOException { } public void testBuilderUsesDefaultSSLContext() throws Exception { - assumeFalse("Due to bug inside jdk, this test can't momentarily run with java 11. " + - "See: https://github.com/elastic/elasticsearch/issues/31940", - System.getProperty("java.version").contains("11")); final SSLContext defaultSSLContext = SSLContext.getDefault(); try { try (RestClient client = buildRestClient()) { @@ -82,7 +86,7 @@ public void testBuilderUsesDefaultSSLContext() throws Exception { client.performRequest(new Request("GET", "/")); fail("connection should have been rejected due to SSL handshake"); } catch (Exception e) { - assertThat(e.getMessage(), containsString("General SSLEngine problem")); + assertThat(e, instanceOf(SSLHandshakeException.class)); } } @@ -103,12 +107,20 @@ private RestClient buildRestClient() { private static SSLContext getSslContext() throws Exception { SSLContext sslContext = SSLContext.getInstance("TLS"); - try (InputStream in = RestClientBuilderIntegTests.class.getResourceAsStream("/testks.jks")) { - KeyStore keyStore = KeyStore.getInstance("JKS"); - keyStore.load(in, "password".toCharArray()); - KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509"); + try (InputStream certFile = RestClientBuilderIntegTests.class.getResourceAsStream("/test.crt")) { + // Build a keystore of default type programmatically since we can't use JKS keystores to + // init a KeyManagerFactory in FIPS 140 JVMs. 
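// Condensed sketch of the keystore assembly done below, using only the JDK
// calls visible in this change (stream and byte sources elided):
//   KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType()); // not "JKS"
//   ks.load(null, password);                                       // start from an empty store
//   Certificate cert = CertificateFactory.getInstance("X.509").generateCertificate(certStream);
//   PrivateKey key = KeyFactory.getInstance("RSA").generatePrivate(new PKCS8EncodedKeySpec(derBytes));
//   ks.setKeyEntry("mykey", key, password, new Certificate[]{cert});
// This keeps both the certificate chain and the private key in a keystore
// type, and in factories, that FIPS 140 JVMs accept.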
+ KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); + keyStore.load(null, "password".toCharArray()); + CertificateFactory certFactory = CertificateFactory.getInstance("X.509"); + PKCS8EncodedKeySpec privateKeySpec = new PKCS8EncodedKeySpec(Files.readAllBytes(Paths.get(RestClientBuilderIntegTests.class + .getResource("/test.der").toURI()))); + KeyFactory keyFactory = KeyFactory.getInstance("RSA"); + keyStore.setKeyEntry("mykey", keyFactory.generatePrivate(privateKeySpec), "password".toCharArray(), + new Certificate[]{certFactory.generateCertificate(certFile)}); + KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); kmf.init(keyStore, "password".toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509"); + TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); tmf.init(keyStore); sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null); } diff --git a/client/rest/src/test/resources/test.crt b/client/rest/src/test/resources/test.crt new file mode 100644 index 0000000000000..ab1b8c2265df0 --- /dev/null +++ b/client/rest/src/test/resources/test.crt @@ -0,0 +1,24 @@ +-----BEGIN CERTIFICATE----- +MIIEATCCAumgAwIBAgIEObhDZDANBgkqhkiG9w0BAQsFADBnMQswCQYDVQQGEwJV +UzELMAkGA1UECBMCQ0ExFjAUBgNVBAcTDU1vdW50YWluIFZpZXcxEDAOBgNVBAoT +B2VsYXN0aWMxDTALBgNVBAsTBHRlc3QxEjAQBgNVBAMTCXRlc3Qgbm9kZTAeFw0x +NzA3MTcxNjEyNTZaFw0yNzA3MTUxNjEyNTZaMGcxCzAJBgNVBAYTAlVTMQswCQYD +VQQIEwJDQTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHZWxhc3Rp +YzENMAsGA1UECxMEdGVzdDESMBAGA1UEAxMJdGVzdCBub2RlMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnXtuGIgAq6vWzUD34HXkYF+0u103hb8d1h35 +kjeuNApkUhS6x/VbuNp7TpWmprfDgG5w9TourHvyiqcQMDEWrBunS6rmKo1jK1Wm +le3qA3F2l9VIZSNeeYQgezmzuElEPPmBjN8XBByIWKYjZcGd5u7DiquPUh9QLIev +itgB2jfi9D8ewyvaSbVAQuQwyIaDN9L74wKyMC8EuzzAWNSDjgIhhwcR5qg17msa +ItyM44/3hik+ObIGpMlLSxQu2V1U9bOaq48JjQBLHVg1vzC9VzGuNdEb8haFnhJN +UrdESdHymbtBSUvy30iB+kHq5R8wQ4pC+WxChQnbA2GskuFrMQIDAQABo4G0MIGx +MIGPBgNVHREEgYcwgYSHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAGCCWxvY2FsaG9z +dIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghdsb2NhbGhvc3Q0 +LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5sb2NhbGRvbWFp +bjYwHQYDVR0OBBYEFFwNcqIKfGBCBGo9faQJ3TsHmp0SMA0GCSqGSIb3DQEBCwUA +A4IBAQBvUJTRjSOf/+vtyS3OokwRilg1ZGF3psg0DWhjH2ehIRfNibU1Y8FVQo3I +VU8LjcIUK1cN85z+AsYqLXo/C4qmJPydQ1tGpQL7uIrPD4h+Xh3tY6A2DKRJRQFO +w2LjswPidGufMztpPbXxLREqvkvn80VkDnc44UPxYfHvZFqYwYyxZccA5mm+BhYu +IerjfvgX+8zMWIQZOd+jRq8EaVTmVK2Azwwhc5ImWfc0DA3pmGPdECzE4N0VVoIJ +N8PCVltXXP3F7K3LoT6CLSiJ3c/IDVNoVS4pRV6R6Y4oIKD9T/T1kAgAvOrUGRWY +ejWQ41GdUmkmxrqCaMbVCO4s72BC +-----END CERTIFICATE----- diff --git a/client/rest/src/test/resources/test.der b/client/rest/src/test/resources/test.der new file mode 100644 index 0000000000000..454bfd286bd97 Binary files /dev/null and b/client/rest/src/test/resources/test.der differ diff --git a/client/test/build.gradle b/client/test/build.gradle index 59c45186fe76b..cc69a1828dc85 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -25,6 +25,8 @@ apply plugin: 'elasticsearch.build' targetCompatibility = JavaVersion.VERSION_1_7 sourceCompatibility = JavaVersion.VERSION_1_7 +group = "${group}.client.test" + dependencies { compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" diff --git 
a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java index 9c9b6af705a8e..71a41db80a253 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java @@ -19,14 +19,11 @@ package org.elasticsearch.test.rest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import java.io.IOException; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; @@ -49,26 +46,31 @@ public void testIndexWithoutId() throws IOException { } public void testUpsert() throws IOException { - locationTestCase(client().performRequest("POST", "test/test/1/_update", emptyMap(), new StringEntity("{" - + "\"doc\": {\"test\": \"test\"}," - + "\"doc_as_upsert\": true}", ContentType.APPLICATION_JSON))); + Request request = new Request("POST", "test/test/1/_update"); + request.setJsonEntity("{" + + "\"doc\": {\"test\": \"test\"}," + + "\"doc_as_upsert\": true}"); + locationTestCase(client().performRequest(request)); } private void locationTestCase(String method, String url) throws IOException { - locationTestCase(client().performRequest(method, url, emptyMap(), - new StringEntity("{\"test\": \"test\"}", ContentType.APPLICATION_JSON))); + final Request request = new Request(method, url); + request.setJsonEntity("{\"test\": \"test\"}"); + locationTestCase(client().performRequest(request)); // we have to delete the index otherwise the second indexing request will route to the single shard and not produce a 201 final Response response = client().performRequest(new Request("DELETE", "test")); assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); - locationTestCase(client().performRequest(method, url + "?routing=cat", emptyMap(), - new StringEntity("{\"test\": \"test\"}", ContentType.APPLICATION_JSON))); + final Request withRouting = new Request(method, url); + withRouting.addParameter("routing", "cat"); + withRouting.setJsonEntity("{\"test\": \"test\"}"); + locationTestCase(client().performRequest(withRouting)); } private void locationTestCase(Response response) throws IOException { assertEquals(201, response.getStatusLine().getStatusCode()); String location = response.getHeader("Location"); assertThat(location, startsWith("/test/test/")); - Response getResponse = client().performRequest("GET", location); + Response getResponse = client().performRequest(new Request("GET", location)); assertEquals(singletonMap("test", "test"), entityAsMap(getResponse).get("_source")); } diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java index b94aa71b04029..818037f68a111 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java @@ -19,13 +19,11 @@ package org.elasticsearch.test.rest; -import org.apache.http.entity.ContentType; -import 
org.apache.http.entity.StringEntity; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.Request; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -39,8 +37,8 @@ public class NodeRestUsageIT extends ESRestTestCase { @SuppressWarnings("unchecked") public void testWithRestUsage() throws IOException { // First get the current usage figures - Response beforeResponse = client().performRequest("GET", - randomFrom("_nodes/usage", "_nodes/usage/rest_actions", "_nodes/usage/_all")); + String path = randomFrom("_nodes/usage", "_nodes/usage/rest_actions", "_nodes/usage/_all"); + Response beforeResponse = client().performRequest(new Request("GET", path)); Map beforeResponseBodyMap = entityAsMap(beforeResponse); assertThat(beforeResponseBodyMap, notNullValue()); Map before_nodesMap = (Map) beforeResponseBodyMap.get("_nodes"); @@ -80,24 +78,24 @@ public void testWithRestUsage() throws IOException { } // Do some requests to get some rest usage stats - client().performRequest("PUT", "/test"); - client().performRequest("POST", "/test/doc/1", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("POST", "/test/doc/2", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("POST", "/test/doc/3", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("GET", "/test/_search"); - client().performRequest("POST", "/test/doc/4", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("POST", "/test/_refresh"); - client().performRequest("GET", "/_cat/indices"); - client().performRequest("GET", "/_nodes"); - client().performRequest("GET", "/test/_search"); - client().performRequest("GET", "/_nodes/stats"); - client().performRequest("DELETE", "/test"); + client().performRequest(new Request("PUT", "/test")); + for (int i = 0; i < 3; i++) { + final Request index = new Request("POST", "/test/doc/" + (i + 1)); + index.setJsonEntity("{\"foo\": \"bar\"}"); + client().performRequest(index); + } + client().performRequest(new Request("GET", "/test/_search")); + final Request index4 = new Request("POST", "/test/doc/4"); + index4.setJsonEntity("{\"foo\": \"bar\"}"); + client().performRequest(index4); + client().performRequest(new Request("POST", "/test/_refresh")); + client().performRequest(new Request("GET", "/_cat/indices")); + client().performRequest(new Request("GET", "/_nodes")); + client().performRequest(new Request("GET", "/test/_search")); + client().performRequest(new Request("GET", "/_nodes/stats")); + client().performRequest(new Request("DELETE", "/test")); - Response response = client().performRequest("GET", "_nodes/usage"); + Response response = client().performRequest(new Request("GET", "_nodes/usage")); Map responseBodyMap = entityAsMap(response); assertThat(responseBodyMap, notNullValue()); Map _nodesMap = (Map) responseBodyMap.get("_nodes"); @@ -139,7 +137,7 @@ public void testWithRestUsage() throws IOException { public void testMetricsWithAll() throws IOException { ResponseException exception = expectThrows(ResponseException.class, - () -> client().performRequest("GET", "_nodes/usage/_all,rest_actions")); + () -> client().performRequest(new Request("GET", "_nodes/usage/_all,rest_actions"))); assertNotNull(exception);
assertThat(exception.getMessage(), containsString("\"type\":\"illegal_argument_exception\"," + "\"reason\":\"request [_nodes/usage/_all,rest_actions] contains _all and individual metrics [_all,rest_actions]\"")); diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java index ce72af26628a1..a6fc7b9cce18e 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.rest; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.Request; import java.io.IOException; @@ -28,56 +29,56 @@ public class RequestsWithoutContentIT extends ESRestTestCase { public void testIndexMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/idx/type/123")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/idx/type/123"))); assertResponseException(responseException, "request body is required"); } public void testBulkMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/_bulk")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/_bulk"))); assertResponseException(responseException, "request body is required"); } public void testPutSettingsMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - "PUT", "/_settings")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request("PUT", "/_settings"))); assertResponseException(responseException, "request body is required"); } public void testPutMappingsMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/test_index/test_type/_mapping")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/test_index/test_type/_mapping"))); assertResponseException(responseException, "request body is required"); } public void testPutIndexTemplateMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "PUT" : "POST", "/_template/my_template")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "PUT" : "POST", "/_template/my_template"))); assertResponseException(responseException, "request body is required"); } public void testMultiSearchMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? 
"POST" : "GET", "/_msearch")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "GET", "/_msearch"))); assertResponseException(responseException, "request body or source parameter is required"); } public void testPutPipelineMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - "PUT", "/_ingest/pipeline/my_pipeline")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request("PUT", "/_ingest/pipeline/my_pipeline"))); assertResponseException(responseException, "request body or source parameter is required"); } public void testSimulatePipelineMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "GET", "/_ingest/pipeline/my_pipeline/_simulate")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "GET", "/_ingest/pipeline/my_pipeline/_simulate"))); assertResponseException(responseException, "request body or source parameter is required"); } public void testPutScriptMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/_scripts/lang")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/_scripts/lang"))); assertResponseException(responseException, "request body is required"); } diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java index 0b1ad2a6dd9ab..fab809a51bcc2 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java @@ -19,26 +19,21 @@ package org.elasticsearch.test.rest; -import org.apache.http.HttpEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.client.Request; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.HashMap; import java.util.Locale; import java.util.Map; -import static java.util.Collections.emptyMap; - /** * Tests that wait for refresh is fired if the index is closed. 
*/ @@ -46,13 +41,14 @@ public class WaitForRefreshAndCloseTests extends ESRestTestCase { @Before public void setupIndex() throws IOException { try { - client().performRequest("DELETE", indexName()); + client().performRequest(new Request("DELETE", indexName())); } catch (ResponseException e) { // If we get an error, it should be because the index doesn't exist assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); } - client().performRequest("PUT", indexName(), emptyMap(), - new StringEntity("{\"settings\":{\"refresh_interval\":-1}}", ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", indexName()); + request.setJsonEntity("{\"settings\":{\"refresh_interval\":-1}}"); + client().performRequest(request); } @After @@ -69,17 +65,20 @@ private String docPath() { } public void testIndexAndThenClose() throws Exception { - closeWhileListenerEngaged(start("PUT", "", new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON))); + closeWhileListenerEngaged(start("PUT", "", "{\"test\":\"test\"}")); } public void testUpdateAndThenClose() throws Exception { - client().performRequest("PUT", docPath(), emptyMap(), new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON)); - closeWhileListenerEngaged(start("POST", "/_update", - new StringEntity("{\"doc\":{\"name\":\"test\"}}", ContentType.APPLICATION_JSON))); + Request request = new Request("PUT", docPath()); + request.setJsonEntity("{\"test\":\"test\"}"); + client().performRequest(request); + closeWhileListenerEngaged(start("POST", "/_update", "{\"doc\":{\"name\":\"test\"}}")); } public void testDeleteAndThenClose() throws Exception { - client().performRequest("PUT", docPath(), emptyMap(), new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", docPath()); + request.setJsonEntity("{\"test\":\"test\"}"); + client().performRequest(request); closeWhileListenerEngaged(start("DELETE", "", null)); } @@ -88,7 +87,7 @@ private void closeWhileListenerEngaged(ActionFuture future) throws Excep assertBusy(() -> { Map stats; try { - stats = entityAsMap(client().performRequest("GET", indexName() + "/_stats/refresh")); + stats = entityAsMap(client().performRequest(new Request("GET", indexName() + "/_stats/refresh"))); } catch (IOException e) { throw new RuntimeException(e); } @@ -105,18 +104,19 @@ private void closeWhileListenerEngaged(ActionFuture future) throws Excep }); // Close the index. That should flush the listener. - client().performRequest("POST", indexName() + "/_close"); + client().performRequest(new Request("POST", indexName() + "/_close")); // The request shouldn't fail. It certainly shouldn't hang. 
future.get(); } - private ActionFuture start(String method, String path, HttpEntity body) { + private ActionFuture start(String method, String path, String body) { PlainActionFuture future = new PlainActionFuture<>(); - Map params = new HashMap<>(); - params.put("refresh", "wait_for"); - params.put("error_trace", ""); - client().performRequestAsync(method, docPath() + path, params, body, new ResponseListener() { + Request request = new Request(method, docPath() + path); + request.addParameter("refresh", "wait_for"); + request.addParameter("error_trace", ""); + request.setJsonEntity(body); + client().performRequestAsync(request, new ResponseListener() { @Override public void onSuccess(Response response) { try { diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index c5c0f44caeb7a..e486735eb8fb4 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -100,3 +100,6 @@ ${error.file} # due to internationalization enhancements in JDK 9 Elasticsearch needs to set the provider to COMPAT otherwise # time/date parsing will break in an incompatible way for some date patterns and locales 9-:-Djava.locale.providers=COMPAT + +# temporary workaround for C2 bug with JDK 10 on hardware with AVX-512 +10-:-XX:UseAVX=2 diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 1db551934c768..ceff354f15fe8 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -61,6 +61,7 @@ import org.elasticsearch.test.PosixPermissionsResetter; import org.junit.After; import org.junit.Before; +import org.junit.BeforeClass; import java.io.BufferedReader; import java.io.ByteArrayInputStream; @@ -139,6 +140,11 @@ public InstallPluginCommandTests(FileSystem fs, Function temp) { System.setProperty("java.io.tmpdir", temp.apply("tmpdir").toString()); } + @BeforeClass + public static void testIfFipsMode() { + assumeFalse("Can't run in a FIPS JVM because this depends on BouncyCastle (non-fips)", inFipsJvm()); + } + @Override @Before public void setUp() throws Exception { diff --git a/docs/java-api/admin/indices/put-mapping.asciidoc b/docs/java-api/admin/indices/put-mapping.asciidoc index 3e931dfd7b7e7..8bdcb4916976f 100644 --- a/docs/java-api/admin/indices/put-mapping.asciidoc +++ b/docs/java-api/admin/indices/put-mapping.asciidoc @@ -2,17 +2,22 @@ ==== Put Mapping -The PUT mapping API allows you to add a new type while creating an index: +You can add mappings for a new type at index creation time: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{client-tests}/IndicesDocumentationIT.java[index-with-mapping] -------------------------------------------------- <1> <<java-admin-indices-create-index,Creates an index>> called `twitter` -<2> It also adds a `tweet` mapping type. +<2> Add a `tweet` type with a field called `message` that has the datatype `text`. +There are several variants of the above `addMapping` method, some taking an +`XContentBuilder` or a `Map` with the mapping definition as arguments. Make sure +to check the javadocs to pick the simplest one for your use case.
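+
+For example, a minimal sketch using the field/type shorthand variant (the index and
+field names here are illustrative, and `client` is assumed to be an already
+initialized transport `Client`):
+
+[source,java]
+--------------------------------------------------
+client.admin().indices().prepareCreate("twitter")   // create an index called "twitter"
+    .addMapping("tweet", "message", "type=text")    // add a "tweet" type with one text field
+    .get();
+--------------------------------------------------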
-The PUT mapping API also allows to add a new type to an existing index: +The PUT mapping API also allows you to specify the mapping of a type after index +creation. In this case you can provide the mapping as a `String`, using the +same syntax as the REST API: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index cf38040e865db..d952870677b7e 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -57,7 +57,8 @@ The Java High Level REST Client supports the following Miscellaneous APIs: include::miscellaneous/main.asciidoc[] include::miscellaneous/ping.asciidoc[] -include::miscellaneous/x-pack-info.asciidoc[] +include::x-pack/x-pack-info.asciidoc[] +include::x-pack/watcher/put-watch.asciidoc[] == Indices APIs diff --git a/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc b/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc new file mode 100644 index 0000000000000..c803c54eb5e92 --- /dev/null +++ b/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc @@ -0,0 +1,55 @@ +[[java-rest-high-x-pack-watcher-put-watch]] +=== Put Watch API + +[[java-rest-high-x-pack-watcher-put-watch-execution]] +==== Execution + +A watch can be stored as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-execute] +-------------------------------------------------- +<1> Allows the watch to be stored but not triggered. Defaults to `true` + +[[java-rest-high-x-pack-watcher-put-watch-response]] +==== Response + +The returned `XPackPutWatchResponse` contains `created`, `id`, +and `version` information. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-response] +-------------------------------------------------- +<1> `_id` contains the id of the watch +<2> `created` is a boolean indicating whether the watch was created for the first time +<3> `_version` returns the newly created version + +[[java-rest-high-x-pack-watcher-put-watch-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-execute-async] +-------------------------------------------------- +<1> The `XPackPutWatchRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `XPackPutWatchResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure.
The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc b/docs/java-rest/high-level/x-pack/x-pack-info.asciidoc similarity index 100% rename from docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc rename to docs/java-rest/high-level/x-pack/x-pack-info.asciidoc diff --git a/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc b/docs/java-rest/high-level/x-pack/x-pack-usage.asciidoc similarity index 100% rename from docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc rename to docs/java-rest/high-level/x-pack/x-pack-usage.asciidoc diff --git a/docs/painless/painless-execute-script.asciidoc b/docs/painless/painless-execute-script.asciidoc index a3ac5b578d781..2aca959778699 100644 --- a/docs/painless/painless-execute-script.asciidoc +++ b/docs/painless/painless-execute-script.asciidoc @@ -9,23 +9,24 @@ The Painless execute API allows an arbitrary script to be executed and a result .Parameters [options="header"] |====== -| Name | Required | Default | Description -| `script` | yes | - | The script to execute -| `context` | no | `painless_test` | The context the script should be executed in. +| Name | Required | Default | Description +| `script` | yes | - | The script to execute +| `context` | no | `painless_test` | The context the script should be executed in. +| `context_setup` | no | - | Additional parameters to the context. |====== ==== Contexts Contexts control how scripts are executed, what variables are available at runtime and what the return type is. -===== Painless test script context +===== Painless test context The `painless_test` context executes scripts as is and does not add any special parameters. The only variable that is available is `params`, which can be used to access user defined values. The result of the script is always converted to a string. If no context is specified then this context is used by default. -==== Example +====== Example Request: @@ -52,4 +53,124 @@ Response: "result": "0.1" } -------------------------------------------------- -// TESTRESPONSE \ No newline at end of file +// TESTRESPONSE + +===== Filter context + +The `filter` context executes scripts as if they were executed inside a `script` query. +For testing purposes a document must be provided that will be indexed temporarily in-memory and +is accessible to the script being tested. Because of this the _source, stored fields and doc values +are available in the script being tested. + +The following parameters may be specified in `context_setup` for a filter context: + +document:: Contains the document that will be temporarily indexed in-memory and is accessible from the script. +index:: The name of an index containing a mapping that is compatible with the document being indexed.
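+
+As a client-side sketch (hypothetical: `restClient` is assumed to be an already built
+low-level Java REST client), the console example below can equivalently be sent with:
+
+[source,java]
+--------------------------------------------------
+Request request = new Request("POST", "/_scripts/painless/_execute");
+request.setJsonEntity(
+    "{\"script\": {\"source\": \"doc['field'].value.length() <= params.max_length\", " +
+    "\"params\": {\"max_length\": 4}}, " +
+    "\"context\": \"filter\", " +
+    "\"context_setup\": {\"index\": \"my-index\", \"document\": {\"field\": \"four\"}}}");
+Response response = restClient.performRequest(request);
+--------------------------------------------------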
+ +====== Example + +[source,js] +---------------------------------------------------------------- +PUT /my-index +{ + "mappings": { + "_doc": { + "properties": { + "field": { + "type": "keyword" + } + } + } + } +} + +POST /_scripts/painless/_execute +{ + "script": { + "source": "doc['field'].value.length() <= params.max_length", + "params": { + "max_length": 4 + } + }, + "context": "filter", + "context_setup": { + "index": "my-index", + "document": { + "field": "four" + } + } +} +---------------------------------------------------------------- +// CONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "result": true +} +-------------------------------------------------- +// TESTRESPONSE + + +===== Score context + +The `score` context executes scripts as if they were executed inside a `script_score` function in +a `function_score` query. + +The following parameters may be specified in `context_setup` for a score context: + +document:: Contains the document that will be temporarily indexed in-memory and is accessible from the script. +index:: The name of an index containing a mapping that is compatible with the document being indexed. +query:: If `_score` is used in the script then a query can be specified that will be used to compute a score. + +====== Example + +[source,js] +---------------------------------------------------------------- +PUT /my-index +{ + "mappings": { + "_doc": { + "properties": { + "field": { + "type": "keyword" + }, + "rank": { + "type": "long" + } + } + } + } +} + + +POST /_scripts/painless/_execute +{ + "script": { + "source": "doc['rank'].value / params.max_rank", + "params": { + "max_rank": 5.0 + } + }, + "context": "score", + "context_setup": { + "index": "my-index", + "document": { + "rank": 4 + } + } +} +---------------------------------------------------------------- +// CONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "result": 0.8 +} +-------------------------------------------------- +// TESTRESPONSE diff --git a/docs/painless/painless-getting-started.asciidoc b/docs/painless/painless-getting-started.asciidoc index 887769e49abbe..1dec4a33bb583 100644 --- a/docs/painless/painless-getting-started.asciidoc +++ b/docs/painless/painless-getting-started.asciidoc @@ -123,21 +123,8 @@ GET hockey/_search [float] ===== Missing values -If you request the value from a field `field` that isn’t in -the document, `doc['field'].value` for this document returns: - -- `0` if a `field` has a numeric datatype (long, double etc.) -- `false` is a `field` has a boolean datatype -- epoch date if a `field` has a date datatype -- `null` if a `field` has a string datatype -- `null` if a `field` has a geo datatype -- `""` if a `field` has a binary datatype - -IMPORTANT: Starting in 7.0, `doc['field'].value` throws an exception if -the field is missing in a document. To enable this behavior now, -set a {ref}/jvm-options.html[`jvm.option`] -`-Des.scripting.exception_for_missing_value=true` on a node. If you do not enable -this behavior, a deprecation warning is logged on start up. +`doc['field'].value` throws an exception if +the field is missing in a document. To check if a document is missing a value, you can use `doc['field'].size() == 0`.
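+
+For instance (a hypothetical sketch using the low-level Java REST client, with an assumed
+`restClient` instance and the numeric `goals` field from the examples above), a script
+query can guard against missing values explicitly:
+
+[source,java]
+--------------------------------------------------
+Request search = new Request("GET", "/hockey/_search");
+search.setJsonEntity(
+    "{\"query\": {\"bool\": {\"filter\": {\"script\": {\"script\": {" +
+    "\"source\": \"doc['goals'].size() != 0 && doc['goals'].value >= params.min\", " +
+    "\"params\": {\"min\": 1}}}}}}}");
+Response response = restClient.performRequest(search);
+--------------------------------------------------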
diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index 0d73e35f18ec3..19ead367204ba 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -13,8 +13,8 @@ include::install_remove.asciidoc[] ==== Getting started with AWS The plugin provides a repository type named `s3` which may be used when creating a repository. -The repository defaults to using -http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html[IAM Role] +The repository defaults to using https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html[ECS IAM Role] or +http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html[EC2 IAM Role] credentials for authentication. The only mandatory setting is the bucket name: [source,js] diff --git a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc index afaf4f8fa8c46..05687f8669155 100644 --- a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc @@ -8,8 +8,9 @@ contained in a predefined set. [float] === Options [horizontal] -types:: a list of types to keep - +types:: a list of types to include (default mode) or exclude +mode:: if set to `include` (default) the specified token types will be kept, +if set to `exclude` the specified token types will be removed from the stream. [float] === Settings example @@ -53,7 +54,7 @@ POST /keep_types_example/_analyze // CONSOLE // TEST[continued] -And it'd respond: +The response will be: [source,js] -------------------------------------------------- @@ -72,3 +73,70 @@ And it'd respond: // TESTRESPONSE Note how only the `<NUM>` token is in the output. + +=== Exclude mode settings example + +If the `mode` parameter is set to `exclude` as in the following example: + +[source,js] +-------------------------------------------------- +PUT /keep_types_exclude_example +{ + "settings" : { + "analysis" : { + "analyzer" : { + "my_analyzer" : { + "tokenizer" : "standard", + "filter" : ["standard", "lowercase", "remove_numbers"] + } + }, + "filter" : { + "remove_numbers" : { + "type" : "keep_types", + "mode" : "exclude", + "types" : [ "<NUM>" ] + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +We can test it as follows: + +[source,js] +-------------------------------------------------- +POST /keep_types_exclude_example/_analyze +{ + "analyzer" : "my_analyzer", + "text" : "hello 101 world" +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +The response will be: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "hello", + "start_offset": 0, + "end_offset": 5, + "type": "<ALPHANUM>", + "position": 0 + }, + { + "token": "world", + "start_offset": 10, + "end_offset": 15, + "type": "<ALPHANUM>", + "position": 2 + } + ] +} +-------------------------------------------------- +// TESTRESPONSE \ No newline at end of file diff --git a/docs/reference/indices/clearcache.asciidoc b/docs/reference/indices/clearcache.asciidoc index 6a7240dc9586e..6b90f9d49cba8 100644 --- a/docs/reference/indices/clearcache.asciidoc +++ b/docs/reference/indices/clearcache.asciidoc @@ -16,7 +16,8 @@ explicitly by setting `query`, `fielddata` or `request`.
All caches relating to a specific field(s) can also be cleared by specifying `fields` parameter with a comma delimited list of the -relevant fields. +relevant fields. Note that the provided names must refer to concrete +fields -- objects and field aliases are not supported. [float] === Multi Index diff --git a/docs/reference/mapping.asciidoc b/docs/reference/mapping.asciidoc index 6f8f1b38d6f22..31957344baf99 100644 --- a/docs/reference/mapping.asciidoc +++ b/docs/reference/mapping.asciidoc @@ -124,8 +124,10 @@ fields to an existing index with the <<indices-put-mapping,PUT mapping API>>. Other than where documented, *existing field mappings cannot be updated*. Changing the mapping would mean invalidating already indexed -documents. Instead, you should create a new index with the correct mappings -and <<docs-reindex,reindex>> your data into that index. +documents. Instead, you should create a new index with the correct mappings +and <<docs-reindex,reindex>> your data into that index. If you only wish +to rename a field and not change its mappings, it may make sense to introduce +an <<alias,`alias`>> field. [float] == Example mapping diff --git a/docs/reference/mapping/types.asciidoc b/docs/reference/mapping/types.asciidoc index 9b71d7e740495..fbd8181d0959a 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -40,6 +40,8 @@ string:: <<text,`text`>> and <<keyword,`keyword`>> <<parent-join>>:: Defines parent/child relation for documents within the same index +<<alias>>:: Defines an alias to an existing field. + <<feature>>:: Record numeric features to boost hits at query time. <<feature-vector>>:: Record numeric feature vectors to boost hits at query time. @@ -58,6 +60,8 @@ the <>, the This is the purpose of _multi-fields_. Most datatypes support multi-fields via the <<multi-fields,`fields`>> parameter. +include::types/alias.asciidoc[] + include::types/array.asciidoc[] include::types/binary.asciidoc[] diff --git a/docs/reference/mapping/types/alias.asciidoc b/docs/reference/mapping/types/alias.asciidoc new file mode 100644 index 0000000000000..d2b5ccdce8aff --- /dev/null +++ b/docs/reference/mapping/types/alias.asciidoc @@ -0,0 +1,101 @@ +[[alias]] +=== Alias datatype + +An `alias` mapping defines an alternate name for a field in the index. +The alias can be used in place of the target field in <<search,search>> requests, +and selected other APIs like <<search-field-caps,field capabilities>>. + +[source,js] +-------------------------------- +PUT trips +{ + "mappings": { + "_doc": { + "properties": { + "distance": { + "type": "long" + }, + "route_length_miles": { + "type": "alias", + "path": "distance" // <1> + }, + "transit_mode": { + "type": "keyword" + } + } + } + } +} + +GET _search +{ + "query": { + "range" : { + "route_length_miles" : { + "gte" : 39 + } + } + } +} +-------------------------------- +// CONSOLE + +<1> The path to the target field. Note that this must be the full path, including any parent +objects (e.g. `object1.object2.field`). + +Almost all components of the search request accept field aliases. In particular, aliases can be +used in queries, aggregations, and sort fields, as well as when requesting `docvalue_fields`, +`stored_fields`, suggestions, and highlights. Scripts also support aliases when accessing +field values. Please see the section on <<unsupported-apis,unsupported APIs>> for exceptions. + +In some parts of the search request and when requesting field capabilities, field wildcard patterns can be +provided.
In these cases, the wildcard pattern will match field aliases in addition to concrete fields: + +[source,js] +-------------------------------- +GET trips/_field_caps?fields=route_*,transit_mode +-------------------------------- +// CONSOLE +// TEST[continued] + +[[alias-targets]] +==== Alias targets + +There are a few restrictions on the target of an alias: + + * The target must be a concrete field, and not an object or another field alias. + * The target field must exist at the time the alias is created. + * If nested objects are defined, a field alias must have the same nested scope as its target. + +Additionally, a field alias can only have one target. This means that it is not possible to use a +field alias to query over multiple target fields in a single clause. + +[[unsupported-apis]] +==== Unsupported APIs + +Writes to field aliases are not supported: attempting to use an alias in an index or update request +will result in a failure. Likewise, aliases cannot be used as the target of `copy_to`. + +Because alias names are not present in the document source, aliases cannot be used when performing +source filtering. For example, the following request will return an empty result for `_source`: + +[source,js] +-------------------------------- +GET /_search +{ + "query" : { + "match_all": {} + }, + "_source": "route_length_miles" +} +-------------------------------- +// CONSOLE +// TEST[continued] + +Currently only the search and field capabilities APIs will accept and resolve field aliases. +Other APIs that accept field names, such as <>, cannot be used +with field aliases. + +Finally, some queries, such as `terms`, `geo_shape`, and `more_like_this`, allow for fetching query +information from an indexed document. Because field aliases aren't supported when fetching documents, +the part of the query that specifies the lookup path cannot refer to a field by its alias. \ No newline at end of file diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc index 046d82cc507eb..257181f70c507 100644 --- a/docs/reference/modules/transport.asciidoc +++ b/docs/reference/modules/transport.asciidoc @@ -59,7 +59,8 @@ It also uses the common [float] ==== TCP Transport Profiles -Elasticsearch allows you to bind to multiple ports on different interfaces by the use of transport profiles. See this example configuration +Elasticsearch allows you to bind to multiple ports on different interfaces by +the use of transport profiles. See this example configuration [source,yaml] -------------- @@ -71,10 +72,12 @@ transport.profiles.dmz.port: 9700-9800 transport.profiles.dmz.bind_host: 172.16.1.2 -------------- -The `default` profile is a special. It is used as fallback for any other profiles, if those do not have a specific configuration setting set. -Note that the default profile is how other nodes in the cluster will connect to this node usually. In the future this feature will allow to enable node-to-node communication via multiple interfaces. +The `default` profile is special. It is used as a fallback for any other +profiles, if those do not have a specific configuration setting set, and is how +this node connects to other nodes in the cluster. 
-The following parameters can be configured like that +The following parameters can be configured on each transport profile, as in the +example above: * `port`: The port to bind to * `bind_host`: The host to bind diff --git a/docs/reference/search/request/preference.asciidoc b/docs/reference/search/request/preference.asciidoc index 4fd801c5f76e3..5f3fcb2efa6b7 100644 --- a/docs/reference/search/request/preference.asciidoc +++ b/docs/reference/search/request/preference.asciidoc @@ -1,38 +1,55 @@ [[search-request-preference]] === Preference -Controls a `preference` of which shard copies on which to execute the -search. By default, the operation is randomized among the available shard -copies, unless allocation awareness is used. +Controls a `preference` of the shard copies on which to execute the search. By +default, Elasticsearch selects from the available shard copies in an +unspecified order, taking the <> and +<> configuration into +account. However, it may sometimes be desirable to try and route certain +searches to certain sets of shard copies, for instance to make better use of +per-copy caches. The `preference` is a query string parameter which can be set to: [horizontal] -`_local`:: - The operation will prefer to be executed on a local - allocated shard if possible. +`_only_local`:: + The operation will be executed only on shards allocated to the local + node. + +`_local`:: + The operation will be executed on shards allocated to the local node if + possible, and will fall back to other shards if not. `_prefer_nodes:abc,xyz`:: - Prefers execution on the nodes with the provided - node ids (`abc` or `xyz` in this case) if applicable. + The operation will be executed on nodes with one of the provided node + ids (`abc` or `xyz` in this case) if possible. If suitable shard copies + exist on more than one of the selected nodes then the order of + preference between these copies is unspecified. -`_shards:2,3`:: - Restricts the operation to the specified shards. (`2` - and `3` in this case). This preference can be combined with other - preferences but it has to appear first: `_shards:2,3|_local` +`_shards:2,3`:: + Restricts the operation to the specified shards. (`2` and `3` in this + case). This preference can be combined with other preferences but it + has to appear first: `_shards:2,3|_local` -`_only_nodes`:: - Restricts the operation to nodes specified in <> +`_only_nodes:abc*,x*yz,...`:: + Restricts the operation to nodes specified according to the + <>. If suitable shard copies exist on more + than one of the selected nodes then the order of preference between + these copies is unspecified. -Custom (string) value:: - A custom value will be used to guarantee that - the same shards will be used for the same custom value. This can help - with "jumping values" when hitting different shards in different refresh - states. A sample value can be something like the web session id, or the - user name. +Custom (string) value:: + Any value that does not start with `_`. If two searches both give the same + custom string value for their preference and the underlying cluster state + does not change then the same ordering of shards will be used for the + searches. This does not guarantee that the exact same shards will be used + each time: the cluster state, and therefore the selected shards, may change + for a number of reasons including shard relocations and shard failures, and + nodes may sometimes reject searches causing fallbacks to alternative nodes. 
+ However, in practice the ordering of shards tends to remain stable for long + periods of time. A good candidate for a custom preference value is something + like the web session id or the user name. -For instance, use the user's session ID to ensure consistent ordering of results -for the user: +For instance, use the user's session ID `xyzabc123` as follows: [source,js] ------------------------------------------------ @@ -47,3 +64,9 @@ GET /_search?preference=xyzabc123 ------------------------------------------------ // CONSOLE +NOTE: The `_only_local` preference guarantees only to use shard copies on the +local node, which is sometimes useful for troubleshooting. All other options do +not _fully_ guarantee that any particular shard copies are used in a search, +and on a changing index this may mean that repeated searches may yield +different results if they are executed on different shard copies which are in +different refresh states. diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 37e3d3699fafc..94161917d1878 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-4.8.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-4.9-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=ce1645ff129d11aad62dab70d63426fdce6cfd646fa309dc5dc5255dd03c7c11 +distributionSha256Sum=39e2d5803bbd5eaf6c8efe07067b0e5a00235e8c71318642b2ed262920b27721 diff --git a/libs/cli/build.gradle b/libs/cli/build.gradle index 91fbca19eca99..00d6d96ef0d59 100644 --- a/libs/cli/build.gradle +++ b/libs/cli/build.gradle @@ -24,16 +24,6 @@ apply plugin: 'nebula.optional-base' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' -publishing { - publications { - nebula { - artifactId 'elasticsearch-cli' - } - } -} - -archivesBaseName = 'elasticsearch-cli' - dependencies { compile 'net.sf.jopt-simple:jopt-simple:5.0.2' compile "org.elasticsearch:elasticsearch-core:${version}" diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java index d4e1e794a309b..4920b7daae852 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java @@ -39,7 +39,7 @@ public class CharGroupTokenizerFactory extends AbstractTokenizerFactory{ private boolean tokenizeOnSymbol = false; public CharGroupTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); for (final String c : settings.getAsList("tokenize_on_chars")) { if (c == null || c.length() == 0) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java index e81f6b88d248c..27316f4cde5e7 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java +++ 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java @@ -35,7 +35,7 @@ public class ClassicTokenizerFactory extends AbstractTokenizerFactory { private final int maxTokenLength; ClassicTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java index 55a527cc792c8..9bb17abf0cd02 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java @@ -36,7 +36,7 @@ public class EdgeNGramTokenizerFactory extends AbstractTokenizerFactory { private final CharMatcher matcher; EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); this.matcher = parseTokenChars(settings.getAsList("token_chars")); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java index 0f94b521e4b7d..b6b8b45fabfc2 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java @@ -29,21 +29,47 @@ import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Set; /** * A {@link TokenFilterFactory} for {@link TypeTokenFilter}. This filter only * keep tokens that are contained in the set configured via - * {@value #KEEP_TYPES_KEY} setting. + * {@value #KEEP_TYPES_MODE_KEY} setting. *

 * Configuration options:
 * <ul>
- * <li>{@value #KEEP_TYPES_KEY} the array of words / tokens to keep.</li>
+ * <li>{@value #KEEP_TYPES_KEY} the array of words / tokens.</li>
+ * <li>{@value #KEEP_TYPES_MODE_KEY} whether to keep ("include") or discard
+ * ("exclude") the specified token types.</li>
 * </ul>
*/ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory { private final Set keepTypes; - private static final String KEEP_TYPES_KEY = "types"; + private final KeepTypesMode includeMode; + static final String KEEP_TYPES_KEY = "types"; + static final String KEEP_TYPES_MODE_KEY = "mode"; + + enum KeepTypesMode { + INCLUDE, EXCLUDE; + + @Override + public String toString() { + return this.name().toLowerCase(Locale.ROOT); + } + + private static KeepTypesMode fromString(String modeString) { + String lc = modeString.toLowerCase(Locale.ROOT); + if (lc.equals("include")) { + return INCLUDE; + } else if (lc.equals("exclude")) { + return EXCLUDE; + } else { + throw new IllegalArgumentException("`keep_types` tokenfilter mode can only be [" + KeepTypesMode.INCLUDE + "] or [" + + KeepTypesMode.EXCLUDE + "] but was [" + modeString + "]."); + } + } + }; KeepTypesFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); @@ -52,12 +78,12 @@ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory { if ((arrayKeepTypes == null)) { throw new IllegalArgumentException("keep_types requires `" + KEEP_TYPES_KEY + "` to be configured"); } - + this.includeMode = KeepTypesMode.fromString(settings.get(KEEP_TYPES_MODE_KEY, "include")); this.keepTypes = new HashSet<>(arrayKeepTypes); } @Override public TokenStream create(TokenStream tokenStream) { - return new TypeTokenFilter(tokenStream, keepTypes, true); + return new TypeTokenFilter(tokenStream, keepTypes, includeMode == KeepTypesMode.INCLUDE); } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java index abe88462cb996..e4bf2c8c4ad4e 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java @@ -31,7 +31,7 @@ public class KeywordTokenizerFactory extends AbstractTokenizerFactory { private final int bufferSize; KeywordTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); bufferSize = settings.getAsInt("buffer_size", 256); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java index be98eb73a9cad..cba30cb63c36b 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java @@ -29,7 +29,7 @@ public class LetterTokenizerFactory extends AbstractTokenizerFactory { LetterTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java index 8f0c5f759aa64..8c913a33cfe4c 100644 --- 
a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java @@ -30,7 +30,7 @@ public class LowerCaseTokenizerFactory extends AbstractTokenizerFactory implements MultiTermAwareComponent { LowerCaseTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java index b67f67cb2fa75..b00797428b79a 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java @@ -85,7 +85,7 @@ static CharMatcher parseTokenChars(List characterClasses) { } NGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); int maxAllowedNgramDiff = indexSettings.getMaxNgramDiff(); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java index c877fe6944e5b..5b966c1c3b8df 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java @@ -37,7 +37,7 @@ public class PathHierarchyTokenizerFactory extends AbstractTokenizerFactory { private final boolean reverse; PathHierarchyTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); bufferSize = settings.getAsInt("buffer_size", 1024); String delimiter = settings.get("delimiter"); if (delimiter == null) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java index f850b68ac9829..11ba7e44db0e4 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java @@ -35,7 +35,7 @@ public class PatternTokenizerFactory extends AbstractTokenizerFactory { private final int group; PatternTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); String sPattern = settings.get("pattern", "\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/); if (sPattern == null) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java index f861ec3792f5e..0faf407829577 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java @@ -31,7 +31,7 @@ public class SimplePatternSplitTokenizerFactory extends AbstractTokenizerFactory private final String pattern; public SimplePatternSplitTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); pattern = settings.get("pattern", ""); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java index 6db3cfa67a318..67aee333d0ffd 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java @@ -31,7 +31,7 @@ public class SimplePatternTokenizerFactory extends AbstractTokenizerFactory { private final String pattern; public SimplePatternTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); pattern = settings.get("pattern", ""); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java index b76aca42d36ee..861ade079a08e 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java @@ -32,7 +32,7 @@ public class ThaiTokenizerFactory extends AbstractTokenizerFactory { ThaiTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java index 8040c88ea7fa5..cd02eec24b42c 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java @@ -32,7 +32,7 @@ public class UAX29URLEmailTokenizerFactory extends AbstractTokenizerFactory { private final int maxTokenLength; UAX29URLEmailTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java index 1f89d4688136f..7ce6a361cbad2 
100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java @@ -34,7 +34,7 @@ public class WhitespaceTokenizerFactory extends AbstractTokenizerFactory { private Integer maxTokenLength; WhitespaceTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt(MAX_TOKEN_LENGTH, StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java index a19882d6faa00..d0c7723457ff3 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java @@ -34,19 +34,51 @@ import static org.hamcrest.Matchers.instanceOf; public class KeepTypesFilterFactoryTests extends ESTokenStreamTestCase { - public void testKeepTypes() throws IOException { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put("index.analysis.filter.keep_numbers.type", "keep_types") - .putList("index.analysis.filter.keep_numbers.types", new String[] {"<NUM>", "<SOMETHINGELSE>"}) - .build(); + + private static final String BASE_SETTING = "index.analysis.filter.keep_numbers"; + + public void testKeepTypesInclude() throws IOException { + Settings.Builder settingsBuilder = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "<NUM>", "<SOMETHINGELSE>" }); + // either use default mode or set "include" mode explicitly + if (random().nextBoolean()) { + settingsBuilder.put(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, + KeepTypesFilterFactory.KeepTypesMode.INCLUDE); + } + Settings settings = settingsBuilder.build(); + ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers"); + assertThat(tokenFilter, instanceOf(KeepTypesFilterFactory.class)); + String source = "Hello 123 world"; + String[] expected = new String[] { "123" }; + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader(source)); + assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[] { 2 }); + } + + public void testKeepTypesExclude() throws IOException { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "<NUM>", "<SOMETHINGELSE>" }) + .put(BASE_SETTING + "."
+ KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, KeepTypesFilterFactory.KeepTypesMode.EXCLUDE).build(); ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers"); assertThat(tokenFilter, instanceOf(KeepTypesFilterFactory.class)); String source = "Hello 123 world"; - String[] expected = new String[]{"123"}; + String[] expected = new String[] { "Hello", "world" }; Tokenizer tokenizer = new StandardTokenizer(); tokenizer.setReader(new StringReader(source)); - assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[]{2}); + assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[] { 1, 2 }); + } + + public void testKeepTypesException() throws IOException { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "<NUM>", "<SOMETHINGELSE>" }) + .put(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, "bad_parameter").build(); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin())); + assertEquals("`keep_types` tokenfilter mode can only be [include] or [exclude] but was [bad_parameter].", ex.getMessage()); } } diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml index 3dca3bfd7770c..150fa39dcb956 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml @@ -1557,3 +1557,18 @@ filter: [my_bengali_stem] - length: { tokens: 1 } - match: { tokens.0.token: কর } + +--- +"multiplexer": + - do: + indices.analyze: + body: + text: "The quick fox" + tokenizer: "standard" + filter: + - type: multiplexer + filters: [ lowercase, uppercase ] + preserve_original: false + - length: { tokens: 6 } + - match: { tokens.0.token: the } + - match: { tokens.1.token: THE } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java index 43a5f9245b185..23aac797859e7 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java @@ -114,6 +114,8 @@ public void testInvalidJodaPattern() { } public void testJodaPatternLocale() { + //TODO investigate if this is a bug in Joda + assumeFalse("Can't run in a FIPS JVM, Joda parse date error", inFipsJvm()); DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ITALIAN), "date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date"); diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java index 591cc9ce47737..33e6239002eb1 ---
a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java @@ -20,27 +20,52 @@ package org.elasticsearch.script.expression; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.fielddata.AtomicNumericFieldData; +import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.text.ParseException; import java.util.Collections; -public class ExpressionTests extends ESSingleNodeTestCase { - ExpressionScriptEngine service; - SearchLookup lookup; +import static org.mockito.Matchers.anyInt; +import static org.mockito.Matchers.anyObject; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ExpressionTests extends ESTestCase { + private ExpressionScriptEngine service; + private SearchLookup lookup; @Override public void setUp() throws Exception { super.setUp(); - IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double"); + + NumberFieldType fieldType = new NumberFieldType(NumberType.DOUBLE); + MapperService mapperService = mock(MapperService.class); + when(mapperService.fullName("field")).thenReturn(fieldType); + when(mapperService.fullName("alias")).thenReturn(fieldType); + + SortedNumericDoubleValues doubleValues = mock(SortedNumericDoubleValues.class); + when(doubleValues.advanceExact(anyInt())).thenReturn(true); + when(doubleValues.nextValue()).thenReturn(2.718); + + AtomicNumericFieldData atomicFieldData = mock(AtomicNumericFieldData.class); + when(atomicFieldData.getDoubleValues()).thenReturn(doubleValues); + + IndexNumericFieldData fieldData = mock(IndexNumericFieldData.class); + when(fieldData.getFieldName()).thenReturn("field"); + when(fieldData.load(anyObject())).thenReturn(atomicFieldData); + service = new ExpressionScriptEngine(Settings.EMPTY); - QueryShardContext shardContext = index.newQueryShardContext(0, null, () -> 0, null); - lookup = new SearchLookup(index.mapperService(), shardContext::getForField, null); + lookup = new SearchLookup(mapperService, ignored -> fieldData, null); } private SearchScript.LeafFactory compile(String expression) { @@ -50,22 +75,38 @@ private SearchScript.LeafFactory compile(String expression) { public void testNeedsScores() { assertFalse(compile("1.2").needs_score()); - assertFalse(compile("doc['d'].value").needs_score()); + assertFalse(compile("doc['field'].value").needs_score()); assertTrue(compile("1/_score").needs_score()); - assertTrue(compile("doc['d'].value * _score").needs_score()); + assertTrue(compile("doc['field'].value * _score").needs_score()); } public void testCompileError() { ScriptException e = expectThrows(ScriptException.class, () -> { - compile("doc['d'].value * *@#)(@$*@#$ + 4"); + compile("doc['field'].value * *@#)(@$*@#$ + 4"); }); 
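/* Both the syntax error above and the link error below are surfaced as a ScriptException whose cause is a java.text.ParseException (see the imports at the top of this test), which is why these tests assert on the cause rather than on the message text. */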
assertTrue(e.getCause() instanceof ParseException); } public void testLinkError() { ScriptException e = expectThrows(ScriptException.class, () -> { - compile("doc['e'].value * 5"); + compile("doc['nonexistent'].value * 5"); }); assertTrue(e.getCause() instanceof ParseException); } + + public void testFieldAccess() throws IOException { + SearchScript script = compile("doc['field'].value").newInstance(null); + script.setDocument(1); + + double result = script.runAsDouble(); + assertEquals(2.718, result, 0.0); + } + + public void testFieldAccessWithFieldAlias() throws IOException { + SearchScript script = compile("doc['alias'].value").newInstance(null); + script.setDocument(1); + + double result = script.runAsDouble(); + assertEquals(2.718, result, 0.0); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index 6cfc7ff6ebfd2..fe53a3c11001c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -466,8 +466,8 @@ public static PainlessCast getLegalCast(Location location, Class actual, Clas return PainlessCast.standard(actual, expected, explicit); } else { throw location.createError(new ClassCastException("Cannot cast from " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(actual) + "] to " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] to " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "].")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 78db712d183d2..dad8da06e76b1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.lang.invoke.CallSite; import java.lang.invoke.MethodHandle; @@ -185,7 +184,7 @@ static MethodHandle arrayLengthGetter(Class arrayType) { * @throws IllegalArgumentException if no matching whitelisted method was found. 
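* (With this change the lookup key is a plain String of the form name/arity, e.g. "compareTo/1", built by PainlessLookupUtility.buildPainlessMethodKey, replacing the removed PainlessMethodKey class.)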
*/ static PainlessMethod lookupMethodInternal(PainlessLookup painlessLookup, Class receiverClass, String name, int arity) { - PainlessMethodKey key = new PainlessMethodKey(name, arity); + String key = PainlessLookupUtility.buildPainlessMethodKey(name, arity); // check whitelist for matching method for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); @@ -303,7 +302,7 @@ static MethodHandle lookupMethod(PainlessLookup painlessLookup, MethodHandles.Lo nestedType, 0, DefBootstrap.REFERENCE, - PainlessLookupUtility.anyTypeToPainlessTypeName(interfaceType)); + PainlessLookupUtility.typeToCanonicalTypeName(interfaceType)); filter = nested.dynamicInvoker(); } else { throw new AssertionError(); @@ -334,8 +333,8 @@ static MethodHandle lookupReference(PainlessLookup painlessLookup, MethodHandles } int arity = interfaceMethod.arguments.size(); PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity); - return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType, implMethod.owner.name, - implMethod.name, receiverClass); + return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType, + PainlessLookupUtility.typeToCanonicalTypeName(implMethod.target), implMethod.name, receiverClass); } /** Returns a method handle to an implementation of clazz, given method reference signature. */ @@ -348,7 +347,7 @@ private static MethodHandle lookupReferenceInternal(PainlessLookup painlessLooku PainlessMethod interfaceMethod = painlessLookup.getPainlessStructFromJavaClass(clazz).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(clazz) + "], not a functional interface"); + "to [" + PainlessLookupUtility.typeToCanonicalTypeName(clazz) + "], not a functional interface"); } int arity = interfaceMethod.arguments.size() + captures.length; final MethodHandle handle; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 9e72dc2c83576..aa72724b93029 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.lang.invoke.MethodType; @@ -102,22 +101,22 @@ public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessMe interfaceMethodType = interfaceMethod.getMethodType().dropParameterTypes(0, 1); // the Painless$Script class can be inferred if owner is null - if (delegateMethod.owner == null) { + if (delegateMethod.target == null) { delegateClassName = CLASS_NAME; isDelegateInterface = false; } else if (delegateMethod.augmentation != null) { delegateClassName = delegateMethod.augmentation.getName(); isDelegateInterface = delegateMethod.augmentation.isInterface(); } else { - delegateClassName = delegateMethod.owner.clazz.getName(); - isDelegateInterface = delegateMethod.owner.clazz.isInterface(); + delegateClassName = 
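/* owner (a PainlessClass) has been replaced by target (the raw Class<?>), so the delegate class name can be taken directly from Class#getName(): */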
delegateMethod.target.getName(); + isDelegateInterface = delegateMethod.target.isInterface(); } if ("".equals(delegateMethod.name)) { delegateInvokeType = H_NEWINVOKESPECIAL; } else if (Modifier.isStatic(delegateMethod.modifiers)) { delegateInvokeType = H_INVOKESTATIC; - } else if (delegateMethod.owner.clazz.isInterface()) { + } else if (delegateMethod.target.isInterface()) { delegateInvokeType = H_INVOKEINTERFACE; } else { delegateInvokeType = H_INVOKEVIRTUAL; @@ -169,7 +168,7 @@ private static PainlessMethod lookup(PainlessLookup painlessLookup, Class exp PainlessMethod method = painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod; if (method == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface"); + "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); } // lookup requested method @@ -177,10 +176,11 @@ private static PainlessMethod lookup(PainlessLookup painlessLookup, Class exp final PainlessMethod impl; // ctor ref if ("new".equals(call)) { - impl = struct.constructors.get(new PainlessMethodKey("", method.arguments.size())); + impl = struct.constructors.get(PainlessLookupUtility.buildPainlessMethodKey("", method.arguments.size())); } else { // look for a static impl first - PainlessMethod staticImpl = struct.staticMethods.get(new PainlessMethodKey(call, method.arguments.size())); + PainlessMethod staticImpl = + struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.arguments.size())); if (staticImpl == null) { // otherwise a virtual impl final int arity; @@ -191,7 +191,7 @@ private static PainlessMethod lookup(PainlessLookup painlessLookup, Class exp // receiver passed arity = method.arguments.size() - 1; } - impl = struct.methods.get(new PainlessMethodKey(call, arity)); + impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity)); } else { impl = staticImpl; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index e797740fed185..804f6aa2b689c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Arrays; import java.util.Collection; @@ -144,7 +143,7 @@ public Variable getVariable(Location location, String name) { } /** Looks up a method. Returns null if the method does not exist. */ - public PainlessMethod getMethod(PainlessMethodKey key) { + public PainlessMethod getMethod(String key) { PainlessMethod method = lookupMethod(key); if (method != null) { return method; @@ -200,7 +199,7 @@ public PainlessLookup getPainlessLookup() { // variable name -> variable private Map variables; // method name+arity -> methods - private Map methods; + private Map methods; /** * Create a new Locals @@ -238,7 +237,7 @@ private Variable lookupVariable(Location location, String name) { } /** Looks up a method at this scope only. Returns null if the method does not exist. 
*/ - private PainlessMethod lookupMethod(PainlessMethodKey key) { + private PainlessMethod lookupMethod(String key) { if (methods == null) { return null; } @@ -261,7 +260,7 @@ private void addMethod(PainlessMethod method) { if (methods == null) { methods = new HashMap<>(); } - methods.put(new PainlessMethodKey(method.name, method.arguments.size()), method); + methods.put(PainlessLookupUtility.buildPainlessMethodKey(method.name, method.arguments.size()), method); // TODO: check result } @@ -293,7 +292,7 @@ public int getSlot() { @Override public String toString() { StringBuilder b = new StringBuilder(); - b.append("Variable[type=").append(PainlessLookupUtility.anyTypeToPainlessTypeName(clazz)); + b.append("Variable[type=").append(PainlessLookupUtility.typeToCanonicalTypeName(clazz)); b.append(",name=").append(name); b.append(",slot=").append(slot); if (readonly) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 01139f6cf2e70..094a62d188baf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -18,41 +18,75 @@ */ package org.elasticsearch.painless; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; +import org.apache.lucene.store.RAMDirectory; +import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.script.FilterScript; +import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.Locale; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -75,40 +109,181 @@ public Response newResponse() { return new Response(); } - public static class Request extends ActionRequest implements ToXContent { + public static class Request extends SingleShardRequest implements ToXContent { private static final ParseField SCRIPT_FIELD = new ParseField("script"); private static final ParseField CONTEXT_FIELD = new ParseField("context"); + private static final ParseField CONTEXT_SETUP_FIELD = new ParseField("context_setup"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "painless_execute_request", args -> new Request((Script) args[0], (SupportedContext) args[1])); + "painless_execute_request", args -> new Request((Script) args[0], (String) args[1], (ContextSetup) args[2])); static { PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> Script.parse(p), SCRIPT_FIELD); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { - // For now only accept an empty json object: - XContentParser.Token token = p.nextToken(); - assert token == XContentParser.Token.FIELD_NAME; - String contextType = p.currentName(); - token = p.nextToken(); - assert token == XContentParser.Token.START_OBJECT; - token = p.nextToken(); - assert token == XContentParser.Token.END_OBJECT; - token = p.nextToken(); - assert token == XContentParser.Token.END_OBJECT; - return SupportedContext.valueOf(contextType.toUpperCase(Locale.ROOT)); - }, CONTEXT_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), CONTEXT_FIELD); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ContextSetup::parse, CONTEXT_SETUP_FIELD); + } + + static final Map> SUPPORTED_CONTEXTS; + + static { + Map> supportedContexts = new HashMap<>(); + supportedContexts.put("painless_test", 
PainlessTestScript.CONTEXT); + supportedContexts.put("filter", FilterScript.CONTEXT); + supportedContexts.put("score", ScoreScript.CONTEXT); + SUPPORTED_CONTEXTS = Collections.unmodifiableMap(supportedContexts); + } + + static ScriptContext fromScriptContextName(String name) { + ScriptContext scriptContext = SUPPORTED_CONTEXTS.get(name); + if (scriptContext == null) { + throw new UnsupportedOperationException("unsupported script context name [" + name + "]"); + } + return scriptContext; + } + + static class ContextSetup implements Writeable, ToXContentObject { + + private static final ParseField INDEX_FIELD = new ParseField("index"); + private static final ParseField DOCUMENT_FIELD = new ParseField("document"); + private static final ParseField QUERY_FIELD = new ParseField("query"); + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("execute_script_context", + args -> new ContextSetup((String) args[0], (BytesReference) args[1], (QueryBuilder) args[2])); + + static { + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), INDEX_FIELD); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { + try (XContentBuilder b = XContentBuilder.builder(p.contentType().xContent())) { + b.copyCurrentStructure(p); + return BytesReference.bytes(b); + } + }, DOCUMENT_FIELD); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> + AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY_FIELD); + } + + private final String index; + private final BytesReference document; + private final QueryBuilder query; + + private XContentType xContentType; + + static ContextSetup parse(XContentParser parser, Void context) throws IOException { + ContextSetup contextSetup = PARSER.parse(parser, null); + contextSetup.setXContentType(parser.contentType()); + return contextSetup; + } + + ContextSetup(String index, BytesReference document, QueryBuilder query) { + this.index = index; + this.document = document; + this.query = query; + } + + ContextSetup(StreamInput in) throws IOException { + index = in.readOptionalString(); + document = in.readOptionalBytesReference(); + String xContentType = in.readOptionalString(); + if (xContentType != null) { + this.xContentType = XContentType.fromMediaType(xContentType); + } + query = in.readOptionalNamedWriteable(QueryBuilder.class); + } + + public String getIndex() { + return index; + } + + public BytesReference getDocument() { + return document; + } + + public QueryBuilder getQuery() { + return query; + } + + public XContentType getXContentType() { + return xContentType; + } + + public void setXContentType(XContentType xContentType) { + this.xContentType = xContentType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ContextSetup that = (ContextSetup) o; + return Objects.equals(index, that.index) && + Objects.equals(document, that.document) && + Objects.equals(query, that.query); + } + + @Override + public int hashCode() { + return Objects.hash(index, document, query); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(index); + out.writeOptionalBytesReference(document); + out.writeOptionalString(xContentType != null ? 
xContentType.mediaType(): null); + out.writeOptionalNamedWriteable(query); + } + + @Override + public String toString() { + return "ContextSetup{" + + ", index='" + index + '\'' + + ", document=" + document + + ", query=" + query + + ", xContentType=" + xContentType + + '}'; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (index != null) { + builder.field(INDEX_FIELD.getPreferredName(), index); + } + if (document != null) { + builder.field(DOCUMENT_FIELD.getPreferredName()); + try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, document, xContentType)) { + builder.generator().copyCurrentStructure(parser); + } + } + if (query != null) { + builder.field(QUERY_FIELD.getPreferredName(), query); + } + } + builder.endObject(); + return builder; + } + } private Script script; - private SupportedContext context; + private ScriptContext context = PainlessTestScript.CONTEXT; + private ContextSetup contextSetup; static Request parse(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } - Request(Script script, SupportedContext context) { + Request(Script script, String scriptContextName, ContextSetup setup) { this.script = Objects.requireNonNull(script); - this.context = context != null ? context : SupportedContext.PAINLESS_TEST; + if (scriptContextName != null) { + this.context = fromScriptContextName(scriptContextName); + } + if (setup != null) { + this.contextSetup = setup; + index(contextSetup.index); + } } Request() { @@ -118,16 +293,28 @@ public Script getScript() { return script; } - public SupportedContext getContext() { + public ScriptContext getContext() { return context; } + public ContextSetup getContextSetup() { + return contextSetup; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (script.getType() != ScriptType.INLINE) { validationException = addValidationError("only inline scripts are supported", validationException); } + if (needDocumentAndIndex(context)) { + if (contextSetup.index == null) { + validationException = addValidationError("index is a required parameter for current context", validationException); + } + if (contextSetup.document == null) { + validationException = addValidationError("document is a required parameter for current context", validationException); + } + } return validationException; } @@ -135,26 +322,35 @@ public ActionRequestValidationException validate() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); script = new Script(in); - context = SupportedContext.fromId(in.readByte()); + if (in.getVersion().before(Version.V_6_4_0)) { + byte scriptContextId = in.readByte(); + assert scriptContextId == 0; + } else { + context = fromScriptContextName(in.readString()); + contextSetup = in.readOptionalWriteable(ContextSetup::new); + } } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); script.writeTo(out); - out.writeByte(context.id); + if (out.getVersion().before(Version.V_6_4_0)) { + out.writeByte((byte) 0); + } else { + out.writeString(context.name); + out.writeOptionalWriteable(contextSetup); + } } // For testing only: @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(SCRIPT_FIELD.getPreferredName(), script); - 
builder.startObject(CONTEXT_FIELD.getPreferredName()); - { - builder.startObject(context.name()); - builder.endObject(); + builder.field(CONTEXT_FIELD.getPreferredName(), context.name); + if (contextSetup != null) { + builder.field(CONTEXT_SETUP_FIELD.getPreferredName(), contextSetup); } - builder.endObject(); return builder; } @@ -164,41 +360,28 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; return Objects.equals(script, request.script) && - context == request.context; + Objects.equals(context, request.context) && + Objects.equals(contextSetup, request.contextSetup); } @Override public int hashCode() { - return Objects.hash(script, context); + return Objects.hash(script, context, contextSetup); } - public enum SupportedContext { - - PAINLESS_TEST((byte) 0); - - private final byte id; - - SupportedContext(byte id) { - this.id = id; - } - - public static SupportedContext fromId(byte id) { - switch (id) { - case 0: - return PAINLESS_TEST; - default: - throw new IllegalArgumentException("unknown context [" + id + "]"); - } - } + @Override + public String toString() { + return "Request{" + + "script=" + script + + "context=" + context + + ", contextSetup=" + contextSetup + + '}'; } - } - - public static class RequestBuilder extends ActionRequestBuilder { - - RequestBuilder(ElasticsearchClient client) { - super(client, INSTANCE, new Request()); + static boolean needDocumentAndIndex(ScriptContext scriptContext) { + return scriptContext == FilterScript.CONTEXT || scriptContext == ScoreScript.CONTEXT; } + } public static class Response extends ActionResponse implements ToXContentObject { @@ -274,31 +457,139 @@ public interface Factory { } - public static class TransportAction extends HandledTransportAction { - + public static class TransportAction extends TransportSingleShardAction { private final ScriptService scriptService; + private final IndicesService indicesServices; @Inject - public TransportAction(Settings settings, TransportService transportService, - ActionFilters actionFilters, ScriptService scriptService) { - super(settings, NAME, transportService, actionFilters, Request::new); + public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + ScriptService scriptService, ClusterService clusterService, IndicesService indicesServices) { + super(settings, NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + // Forking a thread here, because only light weight operations should happen on network thread and + // Creating a in-memory index is not light weight + // TODO: is MANAGEMENT TP the right TP? Right now this is an admin api (see action name). 
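// Routing note: when the request carries no index, the shards(...) override below returns null and TransportSingleShardAction falls back to executing on the local coordinating node; when an index is given, the request is routed to a random active copy of one of its shards. (This fallback behavior is inferred from TransportSingleShardAction, not spelled out in this patch.)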
+ Request::new, ThreadPool.Names.MANAGEMENT); this.scriptService = scriptService; + this.indicesServices = indicesServices; } + @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - switch (request.context) { - case PAINLESS_TEST: - PainlessTestScript.Factory factory = scriptService.compile(request.script, PainlessTestScript.CONTEXT); - PainlessTestScript painlessTestScript = factory.newInstance(request.script.getParams()); - String result = Objects.toString(painlessTestScript.execute()); - listener.onResponse(new Response(result)); - break; - default: - throw new UnsupportedOperationException("unsupported context [" + request.context + "]"); + protected Response newResponse() { + return new Response(); + } + + @Override + protected ClusterBlockException checkRequestBlock(ClusterState state, InternalRequest request) { + if (request.concreteIndex() != null) { + return super.checkRequestBlock(state, request); } + return null; + } + + @Override + protected boolean resolveIndex(Request request) { + return request.contextSetup != null && request.contextSetup.getIndex() != null; } + @Override + protected ShardsIterator shards(ClusterState state, InternalRequest request) { + if (request.concreteIndex() == null) { + return null; + } + return state.routingTable().index(request.concreteIndex()).randomAllActiveShardsIt(); + } + + @Override + protected Response shardOperation(Request request, ShardId shardId) throws IOException { + IndexService indexService; + if (request.contextSetup != null && request.contextSetup.getIndex() != null) { + ClusterState clusterState = clusterService.state(); + IndicesOptions indicesOptions = IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + String indexExpression = request.contextSetup.index; + Index[] concreteIndices = + indexNameExpressionResolver.concreteIndices(clusterState, indicesOptions, indexExpression); + if (concreteIndices.length != 1) { + throw new IllegalArgumentException("[" + indexExpression + "] does not resolve to a single index"); + } + Index concreteIndex = concreteIndices[0]; + indexService = indicesServices.indexServiceSafe(concreteIndex); + } else { + indexService = null; + } + return innerShardOperation(request, scriptService, indexService); + } + + static Response innerShardOperation(Request request, ScriptService scriptService, IndexService indexService) throws IOException { + final ScriptContext scriptContext = request.context; + if (scriptContext == PainlessTestScript.CONTEXT) { + PainlessTestScript.Factory factory = scriptService.compile(request.script, PainlessTestScript.CONTEXT); + PainlessTestScript painlessTestScript = factory.newInstance(request.script.getParams()); + String result = Objects.toString(painlessTestScript.execute()); + return new Response(result); + } else if (scriptContext == FilterScript.CONTEXT) { + return prepareRamIndex(request, (context, leafReaderContext) -> { + FilterScript.Factory factory = scriptService.compile(request.script, FilterScript.CONTEXT); + FilterScript.LeafFactory leafFactory = + factory.newFactory(request.getScript().getParams(), context.lookup()); + FilterScript filterScript = leafFactory.newInstance(leafReaderContext); + filterScript.setDocument(0); + boolean result = filterScript.execute(); + return new Response(result); + }, indexService); + } else if (scriptContext == ScoreScript.CONTEXT) { + return prepareRamIndex(request, (context, leafReaderContext) -> { + ScoreScript.Factory factory = scriptService.compile(request.script, ScoreScript.CONTEXT); 
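/* A hypothetical end-to-end request exercising this branch (index and field names invented for illustration):
 *
 *   POST /_scripts/painless/_execute
 *   {
 *     "script": { "source": "doc['rank'].value / 10.0" },
 *     "context": "score",
 *     "context_setup": { "index": "my-index", "document": { "rank": 4 } }
 *   }
 *
 * The supplied document is indexed into the throw-away in-memory index built by prepareRamIndex below, and the optional "query" in context_setup is only needed when the script reads _score. */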
+ ScoreScript.LeafFactory leafFactory = + factory.newFactory(request.getScript().getParams(), context.lookup()); + ScoreScript scoreScript = leafFactory.newInstance(leafReaderContext); + scoreScript.setDocument(0); + + if (request.contextSetup.query != null) { + Query luceneQuery = request.contextSetup.query.rewrite(context).toQuery(context); + IndexSearcher indexSearcher = new IndexSearcher(leafReaderContext.reader()); + luceneQuery = indexSearcher.rewrite(luceneQuery); + Weight weight = indexSearcher.createWeight(luceneQuery, true, 1f); + Scorer scorer = weight.scorer(indexSearcher.getIndexReader().leaves().get(0)); + // Consume the first (and only) match. + int docID = scorer.iterator().nextDoc(); + assert docID == scorer.docID(); + scoreScript.setScorer(scorer); + } + + double result = scoreScript.execute(); + return new Response(result); + }, indexService); + } else { + throw new UnsupportedOperationException("unsupported context [" + scriptContext.name + "]"); + } + } + + private static Response prepareRamIndex(Request request, + CheckedBiFunction handler, + IndexService indexService) throws IOException { + + Analyzer defaultAnalyzer = indexService.getIndexAnalyzers().getDefaultIndexAnalyzer(); + + try (RAMDirectory ramDirectory = new RAMDirectory()) { + try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(defaultAnalyzer))) { + String index = indexService.index().getName(); + String type = indexService.mapperService().documentMapper().type(); + BytesReference document = request.contextSetup.document; + XContentType xContentType = request.contextSetup.xContentType; + SourceToParse sourceToParse = SourceToParse.source(index, type, "_id", document, xContentType); + ParsedDocument parsedDocument = indexService.mapperService().documentMapper().parse(sourceToParse); + indexWriter.addDocuments(parsedDocument.docs()); + try (IndexReader indexReader = DirectoryReader.open(indexWriter)) { + final long absoluteStartMillis = System.currentTimeMillis(); + QueryShardContext context = + indexService.newQueryShardContext(0, indexReader, () -> absoluteStartMillis, null); + return handler.apply(context, indexReader.leaves().get(0)); + } + } + } + } } static class RestAction extends BaseRestHandler { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java index ff2061a9a4b92..6d4b455269616 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java @@ -183,7 +183,7 @@ private MethodArgument methodArgument(PainlessLookup painlessLookup, Class cl private static Class definitionTypeForClass(PainlessLookup painlessLookup, Class type, Function, String> unknownErrorMessageSource) { - type = PainlessLookupUtility.javaObjectTypeToPainlessDefType(type); + type = PainlessLookupUtility.javaTypeToType(type); Class componentType = type; while (componentType.isArray()) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index 7d84899b00e58..57b18bc60da44 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -19,19 +19,20 @@ package 
org.elasticsearch.painless.lookup; +import org.objectweb.asm.Type; + import java.lang.invoke.MethodHandle; import java.util.Collections; -import java.util.HashMap; import java.util.Map; public final class PainlessClass { public final String name; public final Class clazz; - public final org.objectweb.asm.Type type; + public final Type type; - public final Map constructors; - public final Map staticMethods; - public final Map methods; + public final Map constructors; + public final Map staticMethods; + public final Map methods; public final Map staticMembers; public final Map members; @@ -41,63 +42,25 @@ public final class PainlessClass { public final PainlessMethod functionalMethod; - PainlessClass(String name, Class clazz, org.objectweb.asm.Type type) { + PainlessClass(String name, Class clazz, Type type, + Map constructors, Map staticMethods, Map methods, + Map staticMembers, Map members, + Map getters, Map setters, + PainlessMethod functionalMethod) { this.name = name; this.clazz = clazz; this.type = type; - constructors = new HashMap<>(); - staticMethods = new HashMap<>(); - methods = new HashMap<>(); - - staticMembers = new HashMap<>(); - members = new HashMap<>(); - - getters = new HashMap<>(); - setters = new HashMap<>(); - - functionalMethod = null; - } - - private PainlessClass(PainlessClass struct, PainlessMethod functionalMethod) { - name = struct.name; - clazz = struct.clazz; - type = struct.type; + this.constructors = Collections.unmodifiableMap(constructors); + this.staticMethods = Collections.unmodifiableMap(staticMethods); + this.methods = Collections.unmodifiableMap(methods); - constructors = Collections.unmodifiableMap(struct.constructors); - staticMethods = Collections.unmodifiableMap(struct.staticMethods); - methods = Collections.unmodifiableMap(struct.methods); + this.staticMembers = Collections.unmodifiableMap(staticMembers); + this.members = Collections.unmodifiableMap(members); - staticMembers = Collections.unmodifiableMap(struct.staticMembers); - members = Collections.unmodifiableMap(struct.members); - - getters = Collections.unmodifiableMap(struct.getters); - setters = Collections.unmodifiableMap(struct.setters); + this.getters = Collections.unmodifiableMap(getters); + this.setters = Collections.unmodifiableMap(setters); this.functionalMethod = functionalMethod; } - - public PainlessClass freeze(PainlessMethod functionalMethod) { - return new PainlessClass(this, functionalMethod); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessClass struct = (PainlessClass)object; - - return name.equals(struct.name); - } - - @Override - public int hashCode() { - return name.hashCode(); - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java new file mode 100644 index 0000000000000..0eda3660f0b82 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.lookup; + +import org.objectweb.asm.Type; + +import java.lang.invoke.MethodHandle; +import java.util.HashMap; +import java.util.Map; + +final class PainlessClassBuilder { + final String name; + final Class clazz; + final Type type; + + final Map constructors; + final Map staticMethods; + final Map methods; + + final Map staticMembers; + final Map members; + + final Map getters; + final Map setters; + + PainlessMethod functionalMethod; + + PainlessClassBuilder(String name, Class clazz, Type type) { + this.name = name; + this.clazz = clazz; + this.type = type; + + constructors = new HashMap<>(); + staticMethods = new HashMap<>(); + methods = new HashMap<>(); + + staticMembers = new HashMap<>(); + members = new HashMap<>(); + + getters = new HashMap<>(); + setters = new HashMap<>(); + + functionalMethod = null; + } + + PainlessClass build() { + return new PainlessClass(name, clazz, type, + constructors, staticMethods, methods, + staticMembers, members, + getters, setters, + functionalMethod); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java index 7c85bd269b461..f316e1438ecb9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java @@ -23,18 +23,18 @@ public final class PainlessField { public final String name; - public final PainlessClass owner; + public final Class target; public final Class clazz; public final String javaName; public final int modifiers; public final MethodHandle getter; public final MethodHandle setter; - PainlessField(String name, String javaName, PainlessClass owner, Class clazz, int modifiers, + PainlessField(String name, String javaName, Class target, Class clazz, int modifiers, MethodHandle getter, MethodHandle setter) { this.name = name; this.javaName = javaName; - this.owner = owner; + this.target = target; this.clazz = clazz; this.modifiers = modifiers; this.getter = getter; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index 6111d12317b18..752c0c205dd89 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -54,6 +54,6 @@ public PainlessClass getPainlessStructFromJavaClass(Class clazz) { } public Class getJavaClassFromPainlessType(String painlessType) { - return PainlessLookupUtility.painlessTypeNameToPainlessType(painlessType, painlessTypesToJavaClasses); + return PainlessLookupUtility.canonicalTypeNameToType(painlessType, painlessTypesToJavaClasses); } } diff 
--git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 9a5e08d65a754..06773d3ffddf9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -34,234 +34,201 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Stack; import java.util.regex.Pattern; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_TYPE_NAME; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; + public class PainlessLookupBuilder { - private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); - private static final Map methodCache = new HashMap<>(); - private static final Map fieldCache = new HashMap<>(); + private static class PainlessMethodCacheKey { - private static String buildMethodCacheKey(String structName, String methodName, List> arguments) { - StringBuilder key = new StringBuilder(); - key.append(structName); - key.append(methodName); + private final Class targetType; + private final String methodName; + private final List> typeParameters; - for (Class argument : arguments) { - key.append(argument.getName()); + private PainlessMethodCacheKey(Class targetType, String methodName, List> typeParameters) { + this.targetType = targetType; + this.methodName = methodName; + this.typeParameters = Collections.unmodifiableList(typeParameters); } - return key.toString(); - } - - private static String buildFieldCacheKey(String structName, String fieldName, String typeName) { - return structName + fieldName + typeName; - } + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } - private final Map> painlessTypesToJavaClasses; - private final Map, PainlessClass> javaClassesToPainlessStructs; + if (object == null || getClass() != object.getClass()) { + return false; + } - public PainlessLookupBuilder(List whitelists) { - painlessTypesToJavaClasses = new HashMap<>(); - javaClassesToPainlessStructs = new HashMap<>(); + PainlessMethodCacheKey that = (PainlessMethodCacheKey)object; - String origin = null; + return Objects.equals(targetType, that.targetType) && + Objects.equals(methodName, that.methodName) && + Objects.equals(typeParameters, that.typeParameters); + } - painlessTypesToJavaClasses.put("def", def.class); - javaClassesToPainlessStructs.put(def.class, new PainlessClass("def", Object.class, Type.getType(Object.class))); + @Override + public int hashCode() { + return Objects.hash(targetType, methodName, typeParameters); + } + } - try { - // first iteration collects all the Painless type names that - // are used for validation during the second iteration - for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { - String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - PainlessClass painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName)); + private static class PainlessFieldCacheKey { - if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { - throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple 
classes " + - "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]"); - } + private final Class targetType; + private final String fieldName; + private final Class typeParameter; - origin = whitelistStruct.origin; - addStruct(whitelist.javaClassLoader, whitelistStruct); + private PainlessFieldCacheKey(Class targetType, String fieldName, Class typeParameter) { + this.targetType = targetType; + this.fieldName = fieldName; + this.typeParameter = typeParameter; + } - painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName)); - javaClassesToPainlessStructs.put(painlessStruct.clazz, painlessStruct); - } + @Override + public boolean equals(Object object) { + if (this == object) { + return true; } - // second iteration adds all the constructors, methods, and fields that will - // be available in Painless along with validating they exist and all their types have - // been white-listed during the first iteration - for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { - String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - - for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) { - origin = whitelistConstructor.origin; - addConstructor(painlessTypeName, whitelistConstructor); - } - - for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) { - origin = whitelistMethod.origin; - addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod); - } - - for (WhitelistField whitelistField : whitelistStruct.whitelistFields) { - origin = whitelistField.origin; - addField(painlessTypeName, whitelistField); - } - } + if (object == null || getClass() != object.getClass()) { + return false; } - } catch (Exception exception) { - throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception); - } - // goes through each Painless struct and determines the inheritance list, - // and then adds all inherited types to the Painless struct's whitelist - for (Class javaClass : javaClassesToPainlessStructs.keySet()) { - PainlessClass painlessStruct = javaClassesToPainlessStructs.get(javaClass); - - List painlessSuperStructs = new ArrayList<>(); - Class javaSuperClass = painlessStruct.clazz.getSuperclass(); + PainlessFieldCacheKey that = (PainlessFieldCacheKey) object; - Stack> javaInteraceLookups = new Stack<>(); - javaInteraceLookups.push(painlessStruct.clazz); + return Objects.equals(targetType, that.targetType) && + Objects.equals(fieldName, that.fieldName) && + Objects.equals(typeParameter, that.typeParameter); + } - // adds super classes to the inheritance list - if (javaSuperClass != null && javaSuperClass.isInterface() == false) { - while (javaSuperClass != null) { - PainlessClass painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass); + @Override + public int hashCode() { + return Objects.hash(targetType, fieldName, typeParameter); + } + } - if (painlessSuperStruct != null) { - painlessSuperStructs.add(painlessSuperStruct.name); - } + private static final Map painlessMethodCache = new HashMap<>(); + private static final Map painlessFieldCache = new HashMap<>(); - javaInteraceLookups.push(javaSuperClass); - javaSuperClass = javaSuperClass.getSuperclass(); - } - } + private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); + private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); + private 
static final Pattern FIELD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); - // adds all super interfaces to the inheritance list - while (javaInteraceLookups.isEmpty() == false) { - Class javaInterfaceLookup = javaInteraceLookups.pop(); + private final List whitelists; - for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) { - PainlessClass painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface); + private final Map> canonicalClassNamesToClasses; + private final Map, PainlessClassBuilder> classesToPainlessClasses; - if (painlessInterfaceStruct != null) { - String painlessInterfaceStructName = painlessInterfaceStruct.name; + public PainlessLookupBuilder(List whitelists) { + this.whitelists = whitelists; - if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) { - painlessSuperStructs.add(painlessInterfaceStructName); - } + canonicalClassNamesToClasses = new HashMap<>(); + classesToPainlessClasses = new HashMap<>(); - for (Class javaPushInterface : javaInterfaceLookup.getInterfaces()) { - javaInteraceLookups.push(javaPushInterface); - } - } - } - } + canonicalClassNamesToClasses.put(DEF_TYPE_NAME, def.class); + classesToPainlessClasses.put(def.class, + new PainlessClassBuilder(DEF_TYPE_NAME, Object.class, Type.getType(Object.class))); + } - // copies methods and fields from super structs to the parent struct - copyStruct(painlessStruct.name, painlessSuperStructs); + private Class canonicalTypeNameToType(String canonicalTypeName) { + return PainlessLookupUtility.canonicalTypeNameToType(canonicalTypeName, canonicalClassNamesToClasses); + } - // copies methods and fields from Object into interface types - if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) { - PainlessClass painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class); + private void validateType(Class type) { + PainlessLookupUtility.validateType(type, classesToPainlessClasses.keySet()); + } - if (painlessObjectStruct != null) { - copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name)); - } + public void addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importClassName) { + Objects.requireNonNull(classLoader); + Objects.requireNonNull(javaClassName); + + Class clazz; + + if ("void".equals(javaClassName)) clazz = void.class; + else if ("boolean".equals(javaClassName)) clazz = boolean.class; + else if ("byte".equals(javaClassName)) clazz = byte.class; + else if ("short".equals(javaClassName)) clazz = short.class; + else if ("char".equals(javaClassName)) clazz = char.class; + else if ("int".equals(javaClassName)) clazz = int.class; + else if ("long".equals(javaClassName)) clazz = long.class; + else if ("float".equals(javaClassName)) clazz = float.class; + else if ("double".equals(javaClassName)) clazz = double.class; + else { + try { + clazz = Class.forName(javaClassName, true, classLoader); + } catch (ClassNotFoundException cnfe) { + throw new IllegalArgumentException("class [" + javaClassName + "] not found", cnfe); } } - // precompute runtime classes - for (PainlessClass painlessStruct : javaClassesToPainlessStructs.values()) { - addRuntimeClass(painlessStruct); - } - - // copy all structs to make them unmodifiable for outside users: - for (Map.Entry,PainlessClass> entry : javaClassesToPainlessStructs.entrySet()) { - entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue()))); - } + addPainlessClass(clazz, 
importClassName); } - private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) { - String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - String importedPainlessTypeName = painlessTypeName; + public void addPainlessClass(Class clazz, boolean importClassName) { + Objects.requireNonNull(clazz); - if (TYPE_NAME_PATTERN.matcher(painlessTypeName).matches() == false) { - throw new IllegalArgumentException("invalid struct type name [" + painlessTypeName + "]"); + if (clazz == def.class) { + throw new IllegalArgumentException("cannot add reserved class [" + DEF_TYPE_NAME + "]"); } - int index = whitelistStruct.javaClassName.lastIndexOf('.'); + String canonicalClassName = clazz.getCanonicalName(); - if (index != -1) { - importedPainlessTypeName = whitelistStruct.javaClassName.substring(index + 1).replace('$', '.'); + if (clazz.isArray()) { + throw new IllegalArgumentException("cannot add array type [" + canonicalClassName + "] as a class"); } - Class javaClass; - - if ("void".equals(whitelistStruct.javaClassName)) javaClass = void.class; - else if ("boolean".equals(whitelistStruct.javaClassName)) javaClass = boolean.class; - else if ("byte".equals(whitelistStruct.javaClassName)) javaClass = byte.class; - else if ("short".equals(whitelistStruct.javaClassName)) javaClass = short.class; - else if ("char".equals(whitelistStruct.javaClassName)) javaClass = char.class; - else if ("int".equals(whitelistStruct.javaClassName)) javaClass = int.class; - else if ("long".equals(whitelistStruct.javaClassName)) javaClass = long.class; - else if ("float".equals(whitelistStruct.javaClassName)) javaClass = float.class; - else if ("double".equals(whitelistStruct.javaClassName)) javaClass = double.class; - else { - try { - javaClass = Class.forName(whitelistStruct.javaClassName, true, whitelistClassLoader); - } catch (ClassNotFoundException cnfe) { - throw new IllegalArgumentException("invalid java class name [" + whitelistStruct.javaClassName + "]" + - " for struct [" + painlessTypeName + "]"); - } + if (CLASS_NAME_PATTERN.matcher(canonicalClassName).matches() == false) { + throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]"); } - PainlessClass existingStruct = javaClassesToPainlessStructs.get(javaClass); + PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClasses.get(clazz); + + if (existingPainlessClassBuilder == null) { + PainlessClassBuilder painlessClassBuilder = new PainlessClassBuilder(canonicalClassName, clazz, Type.getType(clazz)); - if (existingStruct == null) { - PainlessClass struct = new PainlessClass(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass)); - painlessTypesToJavaClasses.put(painlessTypeName, javaClass); - javaClassesToPainlessStructs.put(javaClass, struct); - } else if (existingStruct.clazz.equals(javaClass) == false) { - throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " + - "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " + - "[" + existingStruct.clazz.getName() + "]"); + canonicalClassNamesToClasses.put(canonicalClassName, clazz); + classesToPainlessClasses.put(clazz, painlessClassBuilder); + } else if (existingPainlessClassBuilder.clazz.equals(clazz) == false) { + throw new IllegalArgumentException("class [" + canonicalClassName + "] " + + "cannot represent multiple java classes with the same name from different class loaders"); } - if (painlessTypeName.equals(importedPainlessTypeName)) { - if 
(whitelistStruct.onlyFQNJavaClassName == false) { - throw new IllegalArgumentException("must use only_fqn parameter on type [" + painlessTypeName + "] with no package"); + String javaClassName = clazz.getName(); + String importedCanonicalClassName = javaClassName.substring(javaClassName.lastIndexOf('.') + 1).replace('$', '.'); + + if (canonicalClassName.equals(importedCanonicalClassName)) { + if (importClassName == true) { + throw new IllegalArgumentException("must use only_fqn parameter on class [" + canonicalClassName + "] with no package"); } } else { - Class importedJavaClass = painlessTypesToJavaClasses.get(importedPainlessTypeName); + Class importedPainlessType = canonicalClassNamesToClasses.get(importedCanonicalClassName); - if (importedJavaClass == null) { - if (whitelistStruct.onlyFQNJavaClassName == false) { - if (existingStruct != null) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]"); + if (importedPainlessType == null) { + if (importClassName) { + if (existingPainlessClassBuilder != null) { + throw new IllegalArgumentException( + "inconsistent only_fqn parameters found for painless type [" + canonicalClassName + "]"); } - painlessTypesToJavaClasses.put(importedPainlessTypeName, javaClass); + canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz); } - } else if (importedJavaClass.equals(javaClass) == false) { - throw new IllegalArgumentException("imported name [" + painlessTypeName + "] is used to " + - "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] " + - "and [" + importedJavaClass.getName() + "]"); - } else if (whitelistStruct.onlyFQNJavaClassName) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]"); + } else if (importedPainlessType.equals(clazz) == false) { + throw new IllegalArgumentException("painless type [" + importedCanonicalClassName + "] illegally represents multiple " + + "java types [" + clazz.getCanonicalName() + "] and [" + importedPainlessType.getCanonicalName() + "]"); + } else if (importClassName == false) { + throw new IllegalArgumentException("inconsistent only_fqn parameters found for painless type [" + canonicalClassName + "]"); } } } private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) { - PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " + @@ -275,10 +242,10 @@ private void addConstructor(String ownerStructName, WhitelistConstructor whiteli String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount); try { - Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName); + Class painlessParameterClass = canonicalTypeNameToType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); - javaClassParameters[parameterCount] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessParameterClass); + javaClassParameters[parameterCount] = PainlessLookupUtility.typeToJavaType(painlessParameterClass); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for constructor parameter [" + 
painlessParameterTypeName + "] " + "with owner struct [" + ownerStructName + "] and constructor parameters " + @@ -295,7 +262,7 @@ private void addConstructor(String ownerStructName, WhitelistConstructor whiteli " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception); } - PainlessMethodKey painlessMethodKey = new PainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size()); + String painlessMethodKey = buildPainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size()); PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey); if (painlessConstructor == null) { @@ -309,8 +276,9 @@ private void addConstructor(String ownerStructName, WhitelistConstructor whiteli " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames); } - painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "", painlessParametersTypes), - key -> new PainlessMethod("", ownerStruct, null, void.class, painlessParametersTypes, + painlessConstructor = painlessMethodCache.computeIfAbsent( + new PainlessMethodCacheKey(ownerStruct.clazz, "", painlessParametersTypes), + key -> new PainlessMethod("", ownerStruct.clazz, null, void.class, painlessParametersTypes, asmConstructor, javaConstructor.getModifiers(), javaHandle)); ownerStruct.constructors.put(painlessMethodKey, painlessConstructor); } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){ @@ -321,14 +289,14 @@ private void addConstructor(String ownerStructName, WhitelistConstructor whiteli } private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) { - PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + "name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); } - if (TYPE_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) { + if (METHOD_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) { throw new IllegalArgumentException("invalid method name" + " [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "]."); } @@ -360,11 +328,11 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount); try { - Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName); + Class painlessParameterClass = canonicalTypeNameToType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); javaClassParameters[parameterCount + augmentedOffset] = - PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessParameterClass); + PainlessLookupUtility.typeToJavaType(painlessParameterClass); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " + "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + @@ -386,22 +354,22 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, Class 
painlessReturnClass; try { - painlessReturnClass = getJavaClassFromPainlessType(whitelistMethod.painlessReturnTypeName); + painlessReturnClass = canonicalTypeNameToType(whitelistMethod.painlessReturnTypeName); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " + "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + "and parameters " + whitelistMethod.painlessParameterTypeNames, iae); } - if (javaMethod.getReturnType() != PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessReturnClass)) { + if (javaMethod.getReturnType() != PainlessLookupUtility.typeToJavaType(painlessReturnClass)) { throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " + "does not match the return type class [" + javaMethod.getReturnType() + "] for the " + "method with name [" + whitelistMethod.javaMethodName + "] " + "and parameters " + whitelistMethod.painlessParameterTypeNames); } - PainlessMethodKey painlessMethodKey = - new PainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size()); + String painlessMethodKey = + buildPainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size()); if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) { PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey); @@ -417,9 +385,9 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); } - painlessMethod = methodCache.computeIfAbsent( - buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass, + painlessMethod = painlessMethodCache.computeIfAbsent( + new PainlessMethodCacheKey(ownerStruct.clazz, whitelistMethod.javaMethodName, painlessParametersTypes), + key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, null, painlessReturnClass, painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod); } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass && @@ -443,9 +411,9 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); } - painlessMethod = methodCache.computeIfAbsent( - buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass, + painlessMethod = painlessMethodCache.computeIfAbsent( + new PainlessMethodCacheKey(ownerStruct.clazz, whitelistMethod.javaMethodName, painlessParametersTypes), + key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, javaAugmentedClass, painlessReturnClass, painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); ownerStruct.methods.put(painlessMethodKey, painlessMethod); } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) && @@ -459,14 
+427,14 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, } private void addField(String ownerStructName, WhitelistField whitelistField) { - PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName); } - if (TYPE_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) { + if (FIELD_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) { throw new IllegalArgumentException("invalid field name " + "[" + whitelistField.painlessFieldTypeName + "] for owner struct [" + ownerStructName + "]."); } @@ -483,7 +451,7 @@ private void addField(String ownerStructName, WhitelistField whitelistField) { Class painlessFieldClass; try { - painlessFieldClass = getJavaClassFromPainlessType(whitelistField.painlessFieldTypeName); + painlessFieldClass = canonicalTypeNameToType(whitelistField.painlessFieldTypeName); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " + "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae); @@ -498,10 +466,10 @@ private void addField(String ownerStructName, WhitelistField whitelistField) { PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName); if (painlessField == null) { - painlessField = fieldCache.computeIfAbsent( - buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()), + painlessField = painlessFieldCache.computeIfAbsent( + new PainlessFieldCacheKey(ownerStruct.clazz, whitelistField.javaFieldName, painlessFieldClass), key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), - ownerStruct, painlessFieldClass, javaField.getModifiers(), null, null)); + ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), null, null)); ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField); } else if (painlessField.clazz != painlessFieldClass) { throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " + @@ -527,10 +495,10 @@ private void addField(String ownerStructName, WhitelistField whitelistField) { PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName); if (painlessField == null) { - painlessField = fieldCache.computeIfAbsent( - buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()), + painlessField = painlessFieldCache.computeIfAbsent( + new PainlessFieldCacheKey(ownerStruct.clazz, whitelistField.javaFieldName, painlessFieldClass), key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), - ownerStruct, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter)); + ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter)); ownerStruct.members.put(whitelistField.javaFieldName, painlessField); } else if (painlessField.clazz != painlessFieldClass) { throw new IllegalArgumentException("illegal duplicate member fields [" + 
whitelistField.javaFieldName + "] " + @@ -540,14 +508,15 @@ private void addField(String ownerStructName, WhitelistField whitelistField) { } private void copyStruct(String struct, List children) { - final PainlessClass owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct)); + final PainlessClassBuilder owner = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(struct)); if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); } for (int count = 0; count < children.size(); ++count) { - final PainlessClass child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count))); + final PainlessClassBuilder child = + classesToPainlessClasses.get(canonicalClassNamesToClasses.get(children.get(count))); if (child == null) { throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" + @@ -559,8 +528,8 @@ private void copyStruct(String struct, List children) { " is not a super type of owner struct [" + owner.name + "] in copy."); } - for (Map.Entry kvPair : child.methods.entrySet()) { - PainlessMethodKey methodKey = kvPair.getKey(); + for (Map.Entry kvPair : child.methods.entrySet()) { + String methodKey = kvPair.getKey(); PainlessMethod method = kvPair.getValue(); if (owner.methods.get(methodKey) == null) { // TODO: some of these are no longer valid or outright don't work @@ -615,8 +584,8 @@ private void copyStruct(String struct, List children) { for (PainlessField field : child.members.values()) { if (owner.members.get(field.name) == null) { - owner.members.put(field.name, - new PainlessField(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter)); + owner.members.put(field.name, new PainlessField( + field.name, field.javaName, owner.clazz, field.clazz, field.modifiers, field.getter, field.setter)); } } } @@ -625,10 +594,10 @@ private void copyStruct(String struct, List children) { /** * Precomputes a more efficient structure for dynamic method/field access. */ - private void addRuntimeClass(final PainlessClass struct) { + private void addRuntimeClass(final PainlessClassBuilder struct) { // add all getters/setters - for (Map.Entry method : struct.methods.entrySet()) { - String name = method.getKey().name; + for (Map.Entry method : struct.methods.entrySet()) { + String name = method.getValue().name; PainlessMethod m = method.getValue(); if (m.arguments.size() == 0 && @@ -668,7 +637,7 @@ private void addRuntimeClass(final PainlessClass struct) { } /** computes the functional interface method for a class, or returns null */ - private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) { + private PainlessMethod computeFunctionalInterfaceMethod(PainlessClassBuilder clazz) { if (!clazz.clazz.isInterface()) { return null; } @@ -703,7 +672,7 @@ private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) { } // inspect the one method found from the reflection API, it should match the whitelist! 
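// for example, for java.util.function.Function the single abstract method is apply with an arity of 1, so the
// lookup below would use the key "apply/1" (buildPainlessMethodKey joins the method name and arity with a '/')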
java.lang.reflect.Method oneMethod = methods.get(0); - PainlessMethod painless = clazz.methods.get(new PainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount())); + PainlessMethod painless = clazz.methods.get(buildPainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount())); if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) { throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " + "method is not whitelisted!"); @@ -712,10 +681,130 @@ private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) { } public PainlessLookup build() { - return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessStructs); - } + String origin = "internal error"; + + try { + // first iteration collects all the Painless type names that + // are used for validation during the second iteration + for (Whitelist whitelist : whitelists) { + for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { + String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); + PainlessClassBuilder painlessStruct = + classesToPainlessClasses.get(canonicalClassNamesToClasses.get(painlessTypeName)); + + if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { + throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " + + "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]"); + } + + origin = whitelistStruct.origin; + addPainlessClass( + whitelist.javaClassLoader, whitelistStruct.javaClassName, whitelistStruct.onlyFQNJavaClassName == false); + + painlessStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(painlessTypeName)); + classesToPainlessClasses.put(painlessStruct.clazz, painlessStruct); + } + } + + // second iteration adds all the constructors, methods, and fields that will + // be available in Painless along with validating they exist and all their types have + // been white-listed during the first iteration + for (Whitelist whitelist : whitelists) { + for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { + String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); + + for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) { + origin = whitelistConstructor.origin; + addConstructor(painlessTypeName, whitelistConstructor); + } + + for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) { + origin = whitelistMethod.origin; + addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod); + } + + for (WhitelistField whitelistField : whitelistStruct.whitelistFields) { + origin = whitelistField.origin; + addField(painlessTypeName, whitelistField); + } + } + } + } catch (Exception exception) { + throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception); + } + + // goes through each Painless struct and determines the inheritance list, + // and then adds all inherited types to the Painless struct's whitelist + for (Class javaClass : classesToPainlessClasses.keySet()) { + PainlessClassBuilder painlessStruct = classesToPainlessClasses.get(javaClass); + + List painlessSuperStructs = new ArrayList<>(); + Class javaSuperClass = painlessStruct.clazz.getSuperclass(); + + Stack> javaInteraceLookups = new Stack<>(); + javaInteraceLookups.push(painlessStruct.clazz); + + // adds super classes to the 
inheritance list + if (javaSuperClass != null && javaSuperClass.isInterface() == false) { + while (javaSuperClass != null) { + PainlessClassBuilder painlessSuperStruct = classesToPainlessClasses.get(javaSuperClass); + + if (painlessSuperStruct != null) { + painlessSuperStructs.add(painlessSuperStruct.name); + } + + javaInteraceLookups.push(javaSuperClass); + javaSuperClass = javaSuperClass.getSuperclass(); + } + } + + // adds all super interfaces to the inheritance list + while (javaInteraceLookups.isEmpty() == false) { + Class javaInterfaceLookup = javaInteraceLookups.pop(); + + for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) { + PainlessClassBuilder painlessInterfaceStruct = classesToPainlessClasses.get(javaSuperInterface); + + if (painlessInterfaceStruct != null) { + String painlessInterfaceStructName = painlessInterfaceStruct.name; + + if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) { + painlessSuperStructs.add(painlessInterfaceStructName); + } + + for (Class javaPushInterface : javaInterfaceLookup.getInterfaces()) { + javaInteraceLookups.push(javaPushInterface); + } + } + } + } + + // copies methods and fields from super structs to the parent struct + copyStruct(painlessStruct.name, painlessSuperStructs); + + // copies methods and fields from Object into interface types + if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) { + PainlessClassBuilder painlessObjectStruct = classesToPainlessClasses.get(Object.class); + + if (painlessObjectStruct != null) { + copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name)); + } + } + } + + // precompute runtime classes + for (PainlessClassBuilder painlessStruct : classesToPainlessClasses.values()) { + addRuntimeClass(painlessStruct); + } + + Map, PainlessClass> javaClassesToPainlessClasses = new HashMap<>(); + + // copy all structs to make them unmodifiable for outside users: + for (Map.Entry,PainlessClassBuilder> entry : classesToPainlessClasses.entrySet()) { + entry.getValue().functionalMethod = computeFunctionalInterfaceMethod(entry.getValue()); + javaClassesToPainlessClasses.put(entry.getKey(), entry.getValue().build()); + } - public Class getJavaClassFromPainlessType(String painlessType) { - return PainlessLookupUtility.painlessTypeNameToPainlessType(painlessType, painlessTypesToJavaClasses); + return new PainlessLookup(canonicalClassNamesToClasses, javaClassesToPainlessClasses); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index d1f3ee4ece3e0..1f698b7c673f5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -19,264 +19,344 @@ package org.elasticsearch.painless.lookup; -import org.objectweb.asm.Type; - import java.util.Arrays; import java.util.Collection; +import java.util.List; import java.util.Map; +import java.util.Objects; /** - * This class contains methods shared by {@link PainlessLookupBuilder}, {@link PainlessLookup}, and other classes within + * PainlessLookupUtility contains methods shared by {@link PainlessLookupBuilder}, {@link PainlessLookup}, and other classes within * Painless for conversion between type names and types along with some other various utility methods. 
* * The following terminology is used for variable names throughout the lookup package: *
- * - javaClass (Class) - a java class including def and excluding array type java classes
- * - javaClassName (String) - the fully qualified java class name for a javaClass
- * - painlessClassName (String) - the fully qualified painless name or imported painless name for a painlessClass
- * - anyClassName (String) - either a javaClassName or a painlessClassName
- * - javaType (Class) - a java class excluding def and array type java classes
- * - painlessType (Class) - a java class including def and array type java classes
- * - javaTypeName (String) - the fully qualified java Type name for a javaType
- * - painlessTypeName (String) - the fully qualified painless name or imported painless name for a painlessType
- * - anyTypeName (String) - either a javaTypeName or a painlessTypeName
- * - painlessClass (PainlessClass) - a painless class object
+ * A class is a set of methods and fields under a specific class name. A type is either a class or an array under a specific type name.
+ * Note the distinction between class and type: a class never represents an array, whereas a type may. The set of available
+ * classes will always be a subset of the available types.
+ *
+ * Under ambiguous circumstances most variable names are prefixed with asm, java, or painless. If the variable value is the same for asm,
+ * java, and painless, no prefix is used.
+ *
+ * - javaClassName (String) - the fully qualified java class name where '$' tokens represent inner classes, excluding
+ *   def and array types
+ *
+ * - javaClass (Class) - a java class excluding def and array types
+ *
+ * - javaType (Class) - a java class excluding def and including array types
+ *
+ * - importedClassName (String) - the imported painless class name where the java canonical class name is used without
+ *   the package qualifier
+ *
+ * - canonicalClassName (String) - the fully qualified painless class name, equivalent to the fully
+ *   qualified java canonical class name or imported painless class name for a class,
+ *   including def and excluding array types, where '.' tokens represent inner classes
- * Under ambiguous circumstances most variable names are prefixed with asm, java, or painless.
- * If the variable name is the same for asm, java, and painless, no prefix is used.
+ *
+ * - canonicalTypeName (String) - the fully qualified painless type name, equivalent to the fully
+ *   qualified java canonical type name or imported painless type name for a type,
+ *   including def, where '.' tokens represent inner classes and each set of '[]' tokens
+ *   at the end of the type name represents a single dimension of an array type
+ *
+ * - class/clazz (Class) - a painless class represented by a java class, including def and excluding array
+ *   types
+ *
+ * - type (Class) - a painless type represented by a java class, including def and array types
+ *
+ * - painlessClass (PainlessClass) - a painless class object
+ *
+ * - painlessMethod (PainlessMethod) - a painless method object
+ *
+ * - painlessField (PainlessField) - a painless field object
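+ *
+ * As an illustrative reading of the terminology above (using {@code java.util.Map.Entry} purely as an example):
+ * the javaClassName is java.util.Map$Entry, the canonicalClassName is java.util.Map.Entry, the importedClassName is
+ * Map.Entry, and the canonicalTypeName of a one-dimensional array of it is java.util.Map.Entry[].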
*/ public final class PainlessLookupUtility { - public static Class javaObjectTypeToPainlessDefType(Class javaType) { - if (javaType.isArray()) { - Class javaTypeComponent = javaType.getComponentType(); - int arrayDimensions = 1; + /** + * Converts a canonical type name to a type based on the terminology specified as part of the documentation for + * {@link PainlessLookupUtility}. Since canonical class names are a subset of canonical type names, this method will + * safely convert a canonical class name to a class as well. + */ + public static Class canonicalTypeNameToType(String canonicalTypeName, Map> canonicalClassNamesToClasses) { + Objects.requireNonNull(canonicalTypeName); + Objects.requireNonNull(canonicalClassNamesToClasses); - while (javaTypeComponent.isArray()) { - javaTypeComponent = javaTypeComponent.getComponentType(); - ++arrayDimensions; - } + Class type = canonicalClassNamesToClasses.get(canonicalTypeName); - if (javaTypeComponent == Object.class) { - char[] asmDescriptorBraces = new char[arrayDimensions]; - Arrays.fill(asmDescriptorBraces, '['); + if (type != null) { + return type; + } - String asmDescriptor = new String(asmDescriptorBraces) + Type.getType(def.class).getDescriptor(); - Type asmType = Type.getType(asmDescriptor); + int arrayDimensions = 0; + int arrayIndex = canonicalTypeName.indexOf('['); - try { - return Class.forName(asmType.getInternalName().replace('/', '.')); - } catch (ClassNotFoundException cnfe) { - throw new IllegalStateException("internal error", cnfe); + if (arrayIndex != -1) { + int typeNameLength = canonicalTypeName.length(); + + while (arrayIndex < typeNameLength) { + if (canonicalTypeName.charAt(arrayIndex) == '[' && + ++arrayIndex < typeNameLength && + canonicalTypeName.charAt(arrayIndex++) == ']') { + ++arrayDimensions; + } else { + throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found"); } } - } else if (javaType == Object.class) { - return def.class; + + canonicalTypeName = canonicalTypeName.substring(0, canonicalTypeName.indexOf('[')); + type = canonicalClassNamesToClasses.get(canonicalTypeName); + + char arrayBraces[] = new char[arrayDimensions]; + Arrays.fill(arrayBraces, '['); + String javaTypeName = new String(arrayBraces); + + if (type == boolean.class) { + javaTypeName += "Z"; + } else if (type == byte.class) { + javaTypeName += "B"; + } else if (type == short.class) { + javaTypeName += "S"; + } else if (type == char.class) { + javaTypeName += "C"; + } else if (type == int.class) { + javaTypeName += "I"; + } else if (type == long.class) { + javaTypeName += "J"; + } else if (type == float.class) { + javaTypeName += "F"; + } else if (type == double.class) { + javaTypeName += "D"; + } else { + javaTypeName += "L" + type.getName() + ";"; + } + + try { + return Class.forName(javaTypeName); + } catch (ClassNotFoundException cnfe) { + throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found", cnfe); + } } - return javaType; + throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found"); } - public static Class painlessDefTypeToJavaObjectType(Class painlessType) { - if (painlessType.isArray()) { - Class painlessTypeComponent = painlessType.getComponentType(); - int arrayDimensions = 1; + /** + * Converts a type to a canonical type name based on the terminology specified as part of the documentation for + * {@link PainlessLookupUtility}. Since classes are a subset of types, this method will safely convert a class + * to a canonical class name as well. 
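+ *
+ * For example, {@code typeToCanonicalTypeName(boolean[].class)} would return "boolean[]", and the def class (or any
+ * def array) would have its internal java class name replaced with the painless name "def".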
+ */ + public static String typeToCanonicalTypeName(Class type) { + Objects.requireNonNull(type); - while (painlessTypeComponent.isArray()) { - painlessTypeComponent = painlessTypeComponent.getComponentType(); - ++arrayDimensions; - } + String canonicalTypeName = type.getCanonicalName(); + + if (canonicalTypeName.startsWith(def.class.getName())) { + canonicalTypeName = canonicalTypeName.replace(def.class.getName(), DEF_TYPE_NAME); + } - if (painlessTypeComponent == def.class) { - char[] asmDescriptorBraces = new char[arrayDimensions]; - Arrays.fill(asmDescriptorBraces, '['); + return canonicalTypeName; + } - String asmDescriptor = new String(asmDescriptorBraces) + Type.getType(Object.class).getDescriptor(); - Type asmType = Type.getType(asmDescriptor); + /** + * Converts a list of types to a list of canonical type names as a string based on the terminology specified as part of the + * documentation for {@link PainlessLookupUtility}. Since classes are a subset of types, this method will safely convert a list + * of classes or a mixed list of classes and types to a list of canonical type names as a string as well. + */ + public static String typesToCanonicalTypeNames(List> types) { + StringBuilder typesStringBuilder = new StringBuilder("["); - try { - return Class.forName(asmType.getInternalName().replace('/', '.')); - } catch (ClassNotFoundException exception) { - throw new IllegalStateException("internal error", exception); - } + int anyTypesSize = types.size(); + int anyTypesIndex = 0; + + for (Class painlessType : types) { + String canonicalTypeName = typeToCanonicalTypeName(painlessType); + + typesStringBuilder.append(canonicalTypeName); + + if (++anyTypesIndex < anyTypesSize) { + typesStringBuilder.append(","); } - } else if (painlessType == def.class) { - return Object.class; } - return painlessType; - } + typesStringBuilder.append("]"); - public static String anyTypeNameToPainlessTypeName(String anyTypeName) { - return anyTypeName.replace(def.class.getName(), DEF_PAINLESS_CLASS_NAME).replace('$', '.'); + return typesStringBuilder.toString(); } + /** + * Converts a java type to a type based on the terminology specified as part of {@link PainlessLookupUtility} where if a type is an + * object class or object array, the returned type will be the equivalent def class or def array. Otherwise, this behaves as an + * identity function. 
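+ *
+ * For example, {@code javaTypeToType(Object.class)} would return the def class, and {@code javaTypeToType(Object[][].class)}
+ * would return a two-dimensional def array; any other java type is returned unchanged.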
+ */ + public static Class javaTypeToType(Class javaType) { + Objects.requireNonNull(javaType); - public static String anyTypeToPainlessTypeName(Class anyType) { - if (anyType.isLocalClass() || anyType.isAnonymousClass()) { - return null; - } else if (anyType.isArray()) { - Class anyTypeComponent = anyType.getComponentType(); + if (javaType.isArray()) { + Class javaTypeComponent = javaType.getComponentType(); int arrayDimensions = 1; - while (anyTypeComponent.isArray()) { - anyTypeComponent = anyTypeComponent.getComponentType(); + while (javaTypeComponent.isArray()) { + javaTypeComponent = javaTypeComponent.getComponentType(); ++arrayDimensions; } - if (anyTypeComponent == def.class) { - StringBuilder painlessDefTypeNameArrayBuilder = new StringBuilder(DEF_PAINLESS_CLASS_NAME); + if (javaTypeComponent == Object.class) { + char[] arrayBraces = new char[arrayDimensions]; + Arrays.fill(arrayBraces, '['); - for (int dimension = 0; dimension < arrayDimensions; dimension++) { - painlessDefTypeNameArrayBuilder.append("[]"); + try { + return Class.forName(new String(arrayBraces) + "L" + def.class.getName() + ";"); + } catch (ClassNotFoundException cnfe) { + throw new IllegalStateException("internal error", cnfe); } - - return painlessDefTypeNameArrayBuilder.toString(); } - } else if (anyType == def.class) { - return DEF_PAINLESS_CLASS_NAME; + } else if (javaType == Object.class) { + return def.class; } - return anyType.getCanonicalName().replace('$', '.'); + return javaType; } - public static Class painlessTypeNameToPainlessType(String painlessTypeName, Map> painlessClassNamesToJavaClasses) { - Class javaClass = painlessClassNamesToJavaClasses.get(painlessTypeName); - - if (javaClass != null) { - return javaClass; - } - - int arrayDimensions = 0; - int arrayIndex = painlessTypeName.indexOf('['); + /** + * Converts a type to a java type based on the terminology specified as part of {@link PainlessLookupUtility} where if a type is a + * def class or def array, the returned type will be the equivalent object class or object array. Otherwise, this behaves as an + * identity function. 
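+ *
+ * For example, {@code typeToJavaType(def.class)} would return {@code Object.class}, and a def array would map to an
+ * object array of the same dimensionality; any other type is returned unchanged.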
+ */ + public static Class typeToJavaType(Class type) { + Objects.requireNonNull(type); - if (arrayIndex != -1) { - int painlessTypeNameLength = painlessTypeName.length(); + if (type.isArray()) { + Class typeComponent = type.getComponentType(); + int arrayDimensions = 1; - while (arrayIndex < painlessTypeNameLength) { - if (painlessTypeName.charAt(arrayIndex) == '[' && - ++arrayIndex < painlessTypeNameLength && - painlessTypeName.charAt(arrayIndex++) == ']') { - ++arrayDimensions; - } else { - throw new IllegalArgumentException("invalid painless type [" + painlessTypeName + "]."); - } + while (typeComponent.isArray()) { + typeComponent = typeComponent.getComponentType(); + ++arrayDimensions; } - painlessTypeName = painlessTypeName.substring(0, painlessTypeName.indexOf('[')); - javaClass = painlessClassNamesToJavaClasses.get(painlessTypeName); - - char javaDescriptorBraces[] = new char[arrayDimensions]; - Arrays.fill(javaDescriptorBraces, '['); - String javaDescriptor = new String(javaDescriptorBraces); - - if (javaClass == boolean.class) { - javaDescriptor += "Z"; - } else if (javaClass == byte.class) { - javaDescriptor += "B"; - } else if (javaClass == short.class) { - javaDescriptor += "S"; - } else if (javaClass == char.class) { - javaDescriptor += "C"; - } else if (javaClass == int.class) { - javaDescriptor += "I"; - } else if (javaClass == long.class) { - javaDescriptor += "J"; - } else if (javaClass == float.class) { - javaDescriptor += "F"; - } else if (javaClass == double.class) { - javaDescriptor += "D"; - } else { - javaDescriptor += "L" + javaClass.getName() + ";"; - } + if (typeComponent == def.class) { + char[] arrayBraces = new char[arrayDimensions]; + Arrays.fill(arrayBraces, '['); - try { - return Class.forName(javaDescriptor); - } catch (ClassNotFoundException cnfe) { - throw new IllegalStateException("painless type [" + painlessTypeName + "] not found", cnfe); + try { + return Class.forName(new String(arrayBraces) + "L" + Object.class.getName() + ";"); + } catch (ClassNotFoundException cnfe) { + throw new IllegalStateException("internal error", cnfe); + } } + } else if (type == def.class) { + return Object.class; } - throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found"); + return type; } - public static void validatePainlessType(Class painlessType, Collection> javaClasses) { - String painlessTypeName = anyTypeNameToPainlessTypeName(painlessType.getName()); + /** + * Ensures a type exists based on the terminology specified as part of {@link PainlessLookupUtility}. Throws an + * {@link IllegalArgumentException} if the type does not exist. 
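+ *
+ * Note that array types are validated through their component class, so validating a two-dimensional array type only
+ * requires the element class itself to be present in the given collection of classes.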
+ */ + public static void validateType(Class type, Collection> classes) { + String canonicalTypeName = typeToCanonicalTypeName(type); - while (painlessType.getComponentType() != null) { - painlessType = painlessType.getComponentType(); + while (type.getComponentType() != null) { + type = type.getComponentType(); } - if (javaClasses.contains(painlessType) == false) { - throw new IllegalStateException("painless type [" + painlessTypeName + "] not found"); + if (classes.contains(type) == false) { + throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found"); } } - public static String buildPainlessMethodKey(String methodName, int methodArity) { - return methodName + "/" + methodArity; - } - - public static String buildPainlessFieldKey(String fieldName) { - return fieldName; - } - - public static Class getBoxedAnyType(Class anyType) { - if (anyType == boolean.class) { + /** + * Converts a type to its boxed type equivalent if one exists based on the terminology specified as part of + * {@link PainlessLookupUtility}. Otherwise, this behaves as an identity function. + */ + public static Class typeToBoxedType(Class type) { + if (type == boolean.class) { return Boolean.class; - } else if (anyType == byte.class) { + } else if (type == byte.class) { return Byte.class; - } else if (anyType == short.class) { + } else if (type == short.class) { return Short.class; - } else if (anyType == char.class) { + } else if (type == char.class) { return Character.class; - } else if (anyType == int.class) { + } else if (type == int.class) { return Integer.class; - } else if (anyType == long.class) { + } else if (type == long.class) { return Long.class; - } else if (anyType == float.class) { + } else if (type == float.class) { return Float.class; - } else if (anyType == double.class) { + } else if (type == double.class) { return Double.class; } - return anyType; + return type; } - public static Class getUnboxedAnyType(Class anyType) { - if (anyType == Boolean.class) { + /** + * Converts a type to its unboxed type equivalent if one exists based on the terminology specified as part of + * {@link PainlessLookupUtility}. Otherwise, this behaves as an identity function. + */ + public static Class typeToUnboxedType(Class type) { + if (type == Boolean.class) { return boolean.class; - } else if (anyType == Byte.class) { + } else if (type == Byte.class) { return byte.class; - } else if (anyType == Short.class) { + } else if (type == Short.class) { return short.class; - } else if (anyType == Character.class) { + } else if (type == Character.class) { return char.class; - } else if (anyType == Integer.class) { + } else if (type == Integer.class) { return int.class; - } else if (anyType == Long.class) { + } else if (type == Long.class) { return long.class; - } else if (anyType == Float.class) { + } else if (type == Float.class) { return float.class; - } else if (anyType == Double.class) { + } else if (type == Double.class) { return double.class; } - return anyType; + return type; + } + + /** + * Checks if a type based on the terminology specified as part of {@link PainlessLookupUtility} is available as a constant type + * where {@code true} is returned if the type is a constant type and {@code false} otherwise. 
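+ *
+ * For example, {@code isConstantType(String.class)} and {@code isConstantType(int.class)} would return {@code true},
+ * while {@code isConstantType(Object.class)} would return {@code false}.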
+ */ + public static boolean isConstantType(Class type) { + return type == boolean.class || + type == byte.class || + type == short.class || + type == char.class || + type == int.class || + type == long.class || + type == float.class || + type == double.class || + type == String.class; + } + + /** + * Constructs a painless method key used to lookup painless methods from a painless class. + */ + public static String buildPainlessMethodKey(String methodName, int methodArity) { + return methodName + "/" + methodArity; } - public static boolean isAnyTypeConstant(Class anyType) { - return anyType == boolean.class || - anyType == byte.class || - anyType == short.class || - anyType == char.class || - anyType == int.class || - anyType == long.class || - anyType == float.class || - anyType == double.class || - anyType == String.class; + /** + * Constructs a painless field key used to lookup painless fields from a painless class. + */ + public static String buildPainlessFieldKey(String fieldName) { + return fieldName; } - public static final String DEF_PAINLESS_CLASS_NAME = def.class.getSimpleName(); - public static final String CONSTRUCTOR_ANY_NAME = ""; + /** + * The def type name as specified in the source for a script. + */ + public static final String DEF_TYPE_NAME = "def"; + + /** + * The method name for all constructors. + */ + public static final String CONSTRUCTOR_NAME = ""; private PainlessLookupUtility() { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java index 8d8a7f691fecd..3321de94a267f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java @@ -21,6 +21,7 @@ import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Opcodes; +import org.objectweb.asm.Type; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodType; @@ -30,7 +31,7 @@ public class PainlessMethod { public final String name; - public final PainlessClass owner; + public final Class target; public final Class augmentation; public final Class rtn; public final List> arguments; @@ -38,11 +39,11 @@ public class PainlessMethod { public final int modifiers; public final MethodHandle handle; - public PainlessMethod(String name, PainlessClass owner, Class augmentation, Class rtn, List> arguments, + public PainlessMethod(String name, Class target, Class augmentation, Class rtn, List> arguments, org.objectweb.asm.commons.Method method, int modifiers, MethodHandle handle) { this.name = name; this.augmentation = augmentation; - this.owner = owner; + this.target = target; this.rtn = rtn; this.arguments = Collections.unmodifiableList(arguments); this.method = method; @@ -69,31 +70,31 @@ public MethodType getMethodType() { params = new Class[1 + arguments.size()]; params[0] = augmentation; for (int i = 0; i < arguments.size(); i++) { - params[i + 1] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); + params[i + 1] = PainlessLookupUtility.typeToJavaType(arguments.get(i)); } - returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn); + returnValue = PainlessLookupUtility.typeToJavaType(rtn); } else if (Modifier.isStatic(modifiers)) { // static method: straightforward copy params = new Class[arguments.size()]; for (int i = 0; i < arguments.size(); i++) { - params[i] = 
PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); + params[i] = PainlessLookupUtility.typeToJavaType(arguments.get(i)); } - returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn); + returnValue = PainlessLookupUtility.typeToJavaType(rtn); } else if ("".equals(name)) { // constructor: returns the owner class params = new Class[arguments.size()]; for (int i = 0; i < arguments.size(); i++) { - params[i] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); + params[i] = PainlessLookupUtility.typeToJavaType(arguments.get(i)); } - returnValue = owner.clazz; + returnValue = target; } else { // virtual/interface method: add receiver class params = new Class[1 + arguments.size()]; - params[0] = owner.clazz; + params[0] = target; for (int i = 0; i < arguments.size(); i++) { - params[i + 1] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); + params[i + 1] = PainlessLookupUtility.typeToJavaType(arguments.get(i)); } - returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn); + returnValue = PainlessLookupUtility.typeToJavaType(rtn); } return MethodType.methodType(returnValue, params); } @@ -106,8 +107,8 @@ public void write(MethodWriter writer) { clazz = augmentation; type = org.objectweb.asm.Type.getType(augmentation); } else { - clazz = owner.clazz; - type = owner.type; + clazz = target; + type = Type.getType(target); } if (Modifier.isStatic(modifiers)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java deleted file mode 100644 index 49413ab0c5fef..0000000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.lookup; - -import java.util.Objects; - -/** - * Key for looking up a method. - *

- * Methods are keyed on both name and arity, and can be overloaded once per arity.
- * This allows signatures such as {@code String.indexOf(String) vs String.indexOf(String, int)}.
- *
- * It is less flexible than full signature overloading where types can differ too, but - * better than just the name, and overloading types adds complexity to users, too. - */ -public final class PainlessMethodKey { - public final String name; - public final int arity; - - /** - * Create a new lookup key - * @param name name of the method - * @param arity number of parameters - */ - public PainlessMethodKey(String name, int arity) { - this.name = Objects.requireNonNull(name); - this.arity = arity; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + arity; - result = prime * result + name.hashCode(); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - PainlessMethodKey other = (PainlessMethodKey) obj; - if (arity != other.arity) return false; - if (!name.equals(other.name)) return false; - return true; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(name); - sb.append('/'); - sb.append(arity); - return sb.toString(); - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java index dd813f73c3dfc..ddf289564b130 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java @@ -157,7 +157,7 @@ AExpression cast(Locals locals) { return ecast; } else { - if (PainlessLookupUtility.isAnyTypeConstant(expected)) { + if (PainlessLookupUtility.isConstantType(expected)) { // For the case where a cast is required, a constant is set, // and the constant can be immediately cast to the expected type. 
// An EConstant replaces this node with the constant cast appropriately diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index 65776ca76f117..00abe788bf46d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -106,8 +106,8 @@ private void analyzeMul(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply multiply [*] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -149,8 +149,8 @@ private void analyzeDiv(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply divide [/] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -197,8 +197,8 @@ private void analyzeRem(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply remainder [%] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -245,8 +245,8 @@ private void analyzeAdd(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply add [+] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -304,8 +304,8 @@ private void analyzeSub(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply subtract [-] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -363,8 +363,8 @@ private void analyzeLSH(Locals variables) { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply left shift [<<] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote = lhspromote; @@ 
-411,8 +411,8 @@ private void analyzeRSH(Locals variables) { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply right shift [>>] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote = lhspromote; @@ -462,8 +462,8 @@ private void analyzeUSH(Locals variables) { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply unsigned shift [>>>] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (lhspromote == def.class || rhspromote == def.class) { @@ -506,8 +506,8 @@ private void analyzeBWAnd(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply and [&] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -546,8 +546,8 @@ private void analyzeXor(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply xor [^] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -587,8 +587,8 @@ private void analyzeBWOr(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply or [|] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index dfed0ca47b482..098c75386e1a6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -23,8 +23,8 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.List; import java.util.Objects; @@ -58,7 +58,7 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - PainlessMethodKey methodKey 
= new PainlessMethodKey(name, arguments.size()); + String methodKey = PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()); method = locals.getMethod(methodKey); if (method == null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index e8ad9d85ed698..7b35bc1b48ee5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -69,7 +69,7 @@ void analyze(Locals locals) { defPointer = "D" + variable + "." + call + ",1"; } else { // typed implementation - defPointer = "S" + PainlessLookupUtility.anyTypeToPainlessTypeName(captured.clazz) + "." + call + ",1"; + defPointer = "S" + PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz) + "." + call + ",1"; } actual = String.class; } else { @@ -77,8 +77,8 @@ void analyze(Locals locals) { // static case if (captured.clazz != def.class) { try { - ref = new FunctionRef( - locals.getPainlessLookup(), expected, PainlessLookupUtility.anyTypeToPainlessTypeName(captured.clazz), call, 1); + ref = new FunctionRef(locals.getPainlessLookup(), expected, + PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1); // check casts between the interface method and the delegate method are legal for (int i = 0; i < ref.interfaceMethod.arguments.size(); ++i) { @@ -110,7 +110,7 @@ void write(MethodWriter writer, Globals globals) { // typed interface, dynamic implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); Type methodType = Type.getMethodType(MethodWriter.getType(expected), MethodWriter.getType(captured.clazz)); - writer.invokeDefCall(call, methodType, DefBootstrap.REFERENCE, PainlessLookupUtility.anyTypeToPainlessTypeName(expected)); + writer.invokeDefCall(call, methodType, DefBootstrap.REFERENCE, PainlessLookupUtility.typeToCanonicalTypeName(expected)); } else { // typed interface, typed implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index b0451b685b57d..b07613714b8ef 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -63,6 +63,6 @@ void write(MethodWriter writer, Globals globals) { @Override public String toString() { - return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(cast.to), child); + return singleLineToString(PainlessLookupUtility.typeToCanonicalTypeName(cast.to), child); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index edf18f501bc77..4d8a71ae3eb14 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -93,8 +93,8 @@ private void analyzeEq(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply equals [==] to types " + - "[" + 
PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -143,8 +143,8 @@ private void analyzeEqR(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply reference equals [===] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } left.expected = promotedType; @@ -184,8 +184,8 @@ private void analyzeNE(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply not equals [!=] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -234,8 +234,8 @@ private void analyzeNER(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply reference not equals [!==] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } left.expected = promotedType; @@ -275,8 +275,8 @@ private void analyzeGTE(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply greater than or equals [>=] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -315,8 +315,8 @@ private void analyzeGT(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply greater than [>] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -355,8 +355,8 @@ private void analyzeLTE(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply less than or equals [<=] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == 
def.class) { @@ -395,8 +395,8 @@ private void analyzeLT(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply less than [>=] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index d4eddb059a847..d787db5d41c92 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -27,7 +27,6 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.util.Objects; @@ -69,12 +68,13 @@ void analyze(Locals locals) { PainlessMethod interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface"); + "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); } - PainlessMethod delegateMethod = locals.getMethod(new PainlessMethodKey(call, interfaceMethod.arguments.size())); + PainlessMethod delegateMethod = + locals.getMethod(PainlessLookupUtility.buildPainlessMethodKey(call, interfaceMethod.arguments.size())); if (delegateMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], function not found"); + "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], function not found"); } ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index 05564a2952e6f..2fa8ca8ca9513 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -64,8 +64,8 @@ void analyze(Locals locals) { } // map to wrapped type for primitive types - resolvedType = clazz.isPrimitive() ? PainlessLookupUtility.getBoxedAnyType(clazz) : - PainlessLookupUtility.painlessDefTypeToJavaObjectType(clazz); + resolvedType = clazz.isPrimitive() ? PainlessLookupUtility.typeToBoxedType(clazz) : + PainlessLookupUtility.typeToJavaType(clazz); // analyze and cast the expression expression.analyze(locals); @@ -76,7 +76,7 @@ void analyze(Locals locals) { primitiveExpression = expression.actual.isPrimitive(); // map to wrapped type for primitive types expressionType = expression.actual.isPrimitive() ? 
- PainlessLookupUtility.getBoxedAnyType(expression.actual) : PainlessLookupUtility.painlessDefTypeToJavaObjectType(clazz); + PainlessLookupUtility.typeToBoxedType(expression.actual) : PainlessLookupUtility.typeToJavaType(clazz); actual = boolean.class; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index 8e8d164b03d62..ab1442be805eb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -123,12 +123,12 @@ void analyze(Locals locals) { interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { throw createError(new IllegalArgumentException("Cannot pass lambda to " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface")); } // check arity before we manipulate parameters if (interfaceMethod.arguments.size() != paramTypeStrs.size()) throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name + - "] in [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "]"); + "] in [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "]"); // for method invocation, its allowed to ignore the return value if (interfaceMethod.rtn == void.class) { returnType = def.class; @@ -140,7 +140,7 @@ void analyze(Locals locals) { for (int i = 0; i < paramTypeStrs.size(); i++) { String paramType = paramTypeStrs.get(i); if (paramType == null) { - actualParamTypeStrs.add(PainlessLookupUtility.anyTypeToPainlessTypeName(interfaceMethod.arguments.get(i))); + actualParamTypeStrs.add(PainlessLookupUtility.typeToCanonicalTypeName(interfaceMethod.arguments.get(i))); } else { actualParamTypeStrs.add(paramType); } @@ -162,14 +162,14 @@ void analyze(Locals locals) { List paramTypes = new ArrayList<>(captures.size() + actualParamTypeStrs.size()); List paramNames = new ArrayList<>(captures.size() + paramNameStrs.size()); for (Variable var : captures) { - paramTypes.add(PainlessLookupUtility.anyTypeToPainlessTypeName(var.clazz)); + paramTypes.add(PainlessLookupUtility.typeToCanonicalTypeName(var.clazz)); paramNames.add(var.name); } paramTypes.addAll(actualParamTypeStrs); paramNames.addAll(paramNameStrs); // desugar lambda body into a synthetic method - desugared = new SFunction(reserved, location, PainlessLookupUtility.anyTypeToPainlessTypeName(returnType), name, + desugared = new SFunction(reserved, location, PainlessLookupUtility.typeToCanonicalTypeName(returnType), name, paramTypes, paramNames, statements, true); desugared.generateSignature(locals.getPainlessLookup()); desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index 90475419b3260..e0af653d2098a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -23,9 +23,10 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import 
org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; +import org.objectweb.asm.Type; import java.util.ArrayList; import java.util.List; @@ -61,14 +62,15 @@ void analyze(Locals locals) { actual = ArrayList.class; - constructor = - locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get(new PainlessMethodKey("", 0)); + constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors + .get(PainlessLookupUtility.buildPainlessMethodKey("", 0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); } - method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods.get(new PainlessMethodKey("add", 1)); + method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods + .get(PainlessLookupUtility.buildPainlessMethodKey("add", 1)); if (method == null) { throw createError(new IllegalStateException("Illegal tree structure.")); @@ -90,7 +92,7 @@ void write(MethodWriter writer, Globals globals) { writer.newInstance(MethodWriter.getType(actual)); writer.dup(); - writer.invokeConstructor(constructor.owner.type, constructor.method); + writer.invokeConstructor(Type.getType(constructor.target), constructor.method); for (AExpression value : values) { writer.dup(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index c6474846d4c7a..d81f08dc3cc54 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -23,9 +23,10 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; +import org.objectweb.asm.Type; import java.util.HashMap; import java.util.List; @@ -67,14 +68,15 @@ void analyze(Locals locals) { actual = HashMap.class; - constructor = - locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get(new PainlessMethodKey("", 0)); + constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors + .get(PainlessLookupUtility.buildPainlessMethodKey("", 0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); } - method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods.get(new PainlessMethodKey("put", 2)); + method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods + .get(PainlessLookupUtility.buildPainlessMethodKey("put", 2)); if (method == null) { throw createError(new IllegalStateException("Illegal tree structure.")); @@ -109,7 +111,7 @@ void write(MethodWriter writer, Globals globals) { writer.newInstance(MethodWriter.getType(actual)); writer.dup(); - writer.invokeConstructor(constructor.owner.type, constructor.method); + writer.invokeConstructor(Type.getType(constructor.target), constructor.method); for (int index = 0; index < keys.size(); ++index) { AExpression key = keys.get(index); diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index a780ea3e05be8..c0d4433f7fb5f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -24,8 +24,9 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.objectweb.asm.Type; import java.util.List; import java.util.Objects; @@ -64,7 +65,7 @@ void analyze(Locals locals) { } PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual); - constructor = struct.constructors.get(new PainlessMethodKey("", arguments.size())); + constructor = struct.constructors.get(PainlessLookupUtility.buildPainlessMethodKey("", arguments.size())); if (constructor != null) { Class[] types = new Class[constructor.arguments.size()]; @@ -104,7 +105,7 @@ void write(MethodWriter writer, Globals globals) { argument.write(writer, globals); } - writer.invokeConstructor(constructor.owner.type, constructor.method); + writer.invokeConstructor(Type.getType(constructor.target), constructor.method); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java index 6bc5331cb1d84..3a47dfc725f29 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java @@ -53,7 +53,7 @@ void analyze(Locals locals) { if (expected != null) { if (expected.isPrimitive()) { throw createError(new IllegalArgumentException( - "Cannot cast null to a primitive type [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "].")); + "Cannot cast null to a primitive type [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "].")); } actual = expected; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java index d34399db779df..1c0fce8187646 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java @@ -94,7 +94,7 @@ void analyzeBWNot(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply not [~] to type " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(child.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(child.actual) + "].")); } child.expected = promote; @@ -124,7 +124,7 @@ void analyzerAdd(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply positive [+] to type " + - "[" + PainlessLookupUtility.painlessDefTypeToJavaObjectType(child.actual) + "].")); + "[" + PainlessLookupUtility.typeToJavaType(child.actual) + "].")); } child.expected = promote; @@ -158,7 +158,7 @@ void analyzerSub(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply negative [-] to type " + - "[" + 
PainlessLookupUtility.painlessDefTypeToJavaObjectType(child.actual) + "].")); + "[" + PainlessLookupUtility.typeToJavaType(child.actual) + "].")); } child.expected = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java index c45107a37ac21..7b55cb5a804a0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java @@ -68,7 +68,7 @@ void analyze(Locals locals) { sub = new PSubListShortcut(location, locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual), index); } else { throw createError(new IllegalArgumentException("Illegal array access on type " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual) + "].")); } sub.write = write; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index 445c053347ec3..8fc8a612b845d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -26,7 +26,6 @@ import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import java.util.List; @@ -74,10 +73,10 @@ void analyze(Locals locals) { PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual); if (prefix.actual.isPrimitive()) { - struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookupUtility.getBoxedAnyType(prefix.actual)); + struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookupUtility.typeToBoxedType(prefix.actual)); } - PainlessMethodKey methodKey = new PainlessMethodKey(name, arguments.size()); + String methodKey = PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()); PainlessMethod method = prefix instanceof EStatic ? 
struct.staticMethods.get(methodKey) : struct.methods.get(methodKey); if (method != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index 3f2f887956491..abf398d0e6725 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -27,7 +27,6 @@ import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import java.util.List; @@ -64,7 +63,7 @@ void analyze(Locals locals) { prefix = prefix.cast(locals); if (prefix.actual.isArray()) { - sub = new PSubArrayLength(location, PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual), value); + sub = new PSubArrayLength(location, PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual), value); } else if (prefix.actual == def.class) { sub = new PSubDefField(location, value); } else { @@ -74,19 +73,20 @@ void analyze(Locals locals) { if (field != null) { sub = new PSubField(location, field); } else { - PainlessMethod getter = struct.methods.get( - new PainlessMethodKey("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + PainlessMethod getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( + "get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); if (getter == null) { - getter = struct.methods.get( - new PainlessMethodKey("is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( + "is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); } - PainlessMethod setter = struct.methods.get( - new PainlessMethodKey("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); + PainlessMethod setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( + "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); if (getter != null || setter != null) { - sub = new PSubShortcut(location, value, PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual), getter, setter); + sub = new PSubShortcut( + location, value, PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual), getter, setter); } else { EConstant index = new EConstant(location, value); index.analyze(locals); @@ -104,7 +104,7 @@ void analyze(Locals locals) { if (sub == null) { throw createError(new IllegalArgumentException( - "Unknown field [" + value + "] for type [" + PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual) + "].")); + "Unknown field [" + value + "] for type [" + PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual) + "].")); } if (nullSafe) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java index 8eb154e745bf7..007a599e9f842 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java @@ -25,6 +25,7 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessField; import 
org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.objectweb.asm.Type; import java.lang.reflect.Modifier; import java.util.Objects; @@ -52,7 +53,7 @@ void extractVariables(Set variables) { void analyze(Locals locals) { if (write && Modifier.isFinal(field.modifiers)) { throw createError(new IllegalArgumentException("Cannot write to read-only field [" + field.name + "] for type " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(field.clazz) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(field.clazz) + "].")); } actual = field.clazz; @@ -63,9 +64,9 @@ void write(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.getStatic(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } else { - writer.getField(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } } @@ -94,9 +95,9 @@ void load(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.getStatic(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } else { - writer.getField(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } } @@ -105,9 +106,9 @@ void store(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.putStatic(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.putStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } else { - writer.putField(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.putField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 0a3ab142ddc7c..3841b1fece115 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -25,8 +25,8 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; import java.util.Set; @@ -56,8 +56,8 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - getter = struct.methods.get(new PainlessMethodKey("get", 1)); - setter = struct.methods.get(new PainlessMethodKey("set", 2)); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1)); + setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("set", 2)); if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1 || 
getter.arguments.get(0) != int.class)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index f71e2ac5d1fa0..13a3b9c9b9429 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -24,8 +24,8 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; import java.util.Set; @@ -55,8 +55,8 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - getter = struct.methods.get(new PainlessMethodKey("get", 1)); - setter = struct.methods.get(new PainlessMethodKey("put", 2)); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1)); + setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("put", 2)); if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1)) { throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + struct.name + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index de1a7062a24f2..9ff57e6b913cc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -85,7 +85,7 @@ void analyze(Locals locals) { sub = new SSubEachIterable(location, variable, expression, block); } else { throw createError(new IllegalArgumentException("Illegal for each type " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expression.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expression.actual) + "].")); } sub.analyze(locals); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 1c801d509b581..7c243e296c7e3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -136,7 +136,7 @@ void generateSignature(PainlessLookup painlessLookup) { try { Class paramType = painlessLookup.getJavaClassFromPainlessType(this.paramTypeStrs.get(param)); - paramClasses[param] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(paramType); + paramClasses[param] = PainlessLookupUtility.typeToJavaType(paramType); paramTypes.add(paramType); parameters.add(new Parameter(location, paramNameStrs.get(param), paramType)); } catch (IllegalArgumentException exception) { @@ -146,7 +146,7 @@ void generateSignature(PainlessLookup painlessLookup) { } org.objectweb.asm.commons.Method method = new org.objectweb.asm.commons.Method(name, MethodType.methodType( - PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtnType), paramClasses).toMethodDescriptorString()); + PainlessLookupUtility.typeToJavaType(rtnType), paramClasses).toMethodDescriptorString()); this.method = new PainlessMethod(name, null, 
null, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index cd473e2c84ec7..c354e78a961a3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -30,8 +30,8 @@ import org.elasticsearch.painless.SimpleChecksAdapter; import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.node.SFunction.FunctionReserved; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; @@ -165,12 +165,12 @@ void extractVariables(Set variables) { } public void analyze(PainlessLookup painlessLookup) { - Map methods = new HashMap<>(); + Map methods = new HashMap<>(); for (SFunction function : functions) { function.generateSignature(painlessLookup); - PainlessMethodKey key = new PainlessMethodKey(function.name, function.parameters.size()); + String key = PainlessLookupUtility.buildPainlessMethodKey(function.name, function.parameters.size()); if (methods.put(key, function.method) != null) { throw createError(new IllegalArgumentException("Duplicate functions with name [" + function.name + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java index fea8c8953b67f..7e0d74865f9c7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java @@ -109,6 +109,6 @@ void write(MethodWriter writer, Globals globals) { @Override public String toString() { - return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(variable.clazz), variable.name, expression, block); + return singleLineToString(PainlessLookupUtility.typeToCanonicalTypeName(variable.clazz), variable.name, expression, block); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index cec1297a4c41c..12e3154eb562e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -29,7 +29,6 @@ import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; @@ -77,12 +76,12 @@ void analyze(Locals locals) { if (expression.actual == def.class) { method = null; } else { - method = locals.getPainlessLookup(). 
- getPainlessStructFromJavaClass(expression.actual).methods.get(new PainlessMethodKey("iterator", 0)); + method = locals.getPainlessLookup().getPainlessStructFromJavaClass(expression.actual).methods + .get(PainlessLookupUtility.buildPainlessMethodKey("iterator", 0)); if (method == null) { throw createError(new IllegalArgumentException("Unable to create iterator for the type " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expression.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expression.actual) + "].")); } } @@ -133,6 +132,6 @@ void write(MethodWriter writer, Globals globals) { @Override public String toString() { - return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(variable.clazz), variable.name, expression, block); + return singleLineToString(PainlessLookupUtility.typeToCanonicalTypeName(variable.clazz), variable.name, expression, block); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index 5e8e6ad47d813..e26a5a38c76b8 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -27,6 +27,7 @@ import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.spi.Whitelist; @@ -67,8 +68,8 @@ public static void main(String[] args) throws IOException { Path indexPath = apiRootPath.resolve("index.asciidoc"); logger.info("Starting to write [index.asciidoc]"); try (PrintStream indexStream = new PrintStream( - Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), - false, StandardCharsets.UTF_8.name())) { + Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), + false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(indexStream); List structs = PAINLESS_LOOKUP.getStructs().stream().sorted(comparing(t -> t.name)).collect(toList()); for (PainlessClass struct : structs) { @@ -91,7 +92,7 @@ public static void main(String[] args) throws IOException { false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(typeStream); typeStream.print("[["); - emitAnchor(typeStream, struct); + emitAnchor(typeStream, struct.clazz); typeStream.print("]]++"); typeStream.print(struct.name); typeStream.println("++::"); @@ -104,10 +105,11 @@ public static void main(String[] args) throws IOException { struct.constructors.values().stream().sorted(NUMBER_OF_ARGS).forEach(documentMethod); Map inherited = new TreeMap<>(); struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(method -> { - if (method.owner == struct) { + if (method.target == struct.clazz) { documentMethod(typeStream, method); } else { - inherited.put(method.owner.name, method.owner); + PainlessClass painlessClass = PAINLESS_LOOKUP.getPainlessStructFromJavaClass(method.target); + inherited.put(painlessClass.name, painlessClass); } }); @@ -206,16 +208,16 @@ private static void documentMethod(PrintStream stream, PainlessMethod method) { /** * Anchor text for a {@link PainlessClass}. 
*/ - private static void emitAnchor(PrintStream stream, PainlessClass struct) { + private static void emitAnchor(PrintStream stream, Class clazz) { stream.print("painless-api-reference-"); - stream.print(struct.name.replace('.', '-')); + stream.print(PainlessLookupUtility.typeToCanonicalTypeName(clazz).replace('.', '-')); } /** * Anchor text for a {@link PainlessMethod}. */ private static void emitAnchor(PrintStream stream, PainlessMethod method) { - emitAnchor(stream, method.owner); + emitAnchor(stream, method.target); stream.print('-'); stream.print(methodName(method)); stream.print('-'); @@ -226,18 +228,18 @@ private static void emitAnchor(PrintStream stream, PainlessMethod method) { * Anchor text for a {@link PainlessField}. */ private static void emitAnchor(PrintStream stream, PainlessField field) { - emitAnchor(stream, field.owner); + emitAnchor(stream, field.target); stream.print('-'); stream.print(field.name); } private static String methodName(PainlessMethod method) { - return method.name.equals("") ? method.owner.name : method.name; + return method.name.equals("") ? PainlessLookupUtility.typeToCanonicalTypeName(method.target) : method.name; } /** * Emit a {@link Class}. If the type is primitive or an array of primitives this just emits the name of the type. Otherwise this emits - an internal link with the text. + an internal link with the text. */ private static void emitType(PrintStream stream, Class clazz) { emitStruct(stream, PAINLESS_LOOKUP.getPainlessStructFromJavaClass(clazz)); @@ -253,7 +255,7 @@ private static void emitType(PrintStream stream, Class clazz) { private static void emitStruct(PrintStream stream, PainlessClass struct) { if (false == struct.clazz.isPrimitive() && false == struct.name.equals("def")) { stream.print("<<"); - emitAnchor(stream, struct); + emitAnchor(stream, struct.clazz); stream.print(','); stream.print(struct.name); stream.print(">>"); @@ -271,14 +273,14 @@ private static void emitJavadocLink(PrintStream stream, String root, PainlessMet stream.print("link:{"); stream.print(root); stream.print("-javadoc}/"); - stream.print(classUrlPath(method.augmentation != null ? method.augmentation : method.owner.clazz)); + stream.print(classUrlPath(method.augmentation != null ? method.augmentation : method.target)); stream.print(".html#"); stream.print(methodName(method)); stream.print("%2D"); boolean first = true; if (method.augmentation != null) { first = false; - stream.print(method.owner.clazz.getName()); + stream.print(method.target.getName()); } for (Class clazz: method.arguments) { if (first) { @@ -303,7 +305,7 @@ private static void emitJavadocLink(PrintStream stream, String root, PainlessFie stream.print("link:{"); stream.print(root); stream.print("-javadoc}/"); - stream.print(classUrlPath(field.owner.clazz)); + stream.print(classUrlPath(field.target)); stream.print(".html#"); stream.print(field.javaName); } @@ -315,21 +317,21 @@ private static String javadocRoot(PainlessMethod method) { if (method.augmentation != null) { return "painless"; } - return javadocRoot(method.owner); + return javadocRoot(method.target); } /** * Pick the javadoc root for a {@link PainlessField}. */ private static String javadocRoot(PainlessField field) { - return javadocRoot(field.owner); + return javadocRoot(field.target); } /** - * Pick the javadoc root for a {@link PainlessClass}. + * Pick the javadoc root for a {@link Class}. 
*/ - private static String javadocRoot(PainlessClass struct) { - String classPackage = struct.clazz.getPackage().getName(); + private static String javadocRoot(Class clazz) { + String classPackage = clazz.getPackage().getName(); if (classPackage.startsWith("java")) { return "java8"; } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java new file mode 100644 index 0000000000000..ce92a224f4e90 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.painless; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.painless.PainlessExecuteAction.Request; +import org.elasticsearch.painless.PainlessExecuteAction.Response; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptException; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.singletonMap; +import static org.elasticsearch.painless.PainlessExecuteAction.TransportAction.innerShardOperation; +import static org.hamcrest.Matchers.equalTo; + +public class PainlessExecuteApiTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return Collections.singleton(PainlessPlugin.class); + } + + public void testDefaults() throws IOException { + ScriptService scriptService = getInstanceFromNode(ScriptService.class); + Request request = new Request(new Script("100.0 / 1000.0"), null, null); + Response response = innerShardOperation(request, scriptService, null); + assertThat(response.getResult(), equalTo("0.1")); + + Map params = new HashMap<>(); + params.put("count", 100.0D); + params.put("total", 1000.0D); + request = new Request(new Script(ScriptType.INLINE, "painless", "params.count / params.total", params), null, null); + response = innerShardOperation(request, scriptService, null); + assertThat(response.getResult(), equalTo("0.1")); + + Exception e = expectThrows(ScriptException.class, + () -> { + Request r = new Request(new Script(ScriptType.INLINE, + "painless", "params.count / params.total + doc['constant']", 
params), null, null); + innerShardOperation(r, scriptService, null); + }); + assertThat(e.getCause().getMessage(), equalTo("Variable [doc] is not defined.")); + } + + public void testFilterExecutionContext() throws IOException { + ScriptService scriptService = getInstanceFromNode(ScriptService.class); + IndexService indexService = createIndex("index", Settings.EMPTY, "doc", "field", "type=long"); + + Request.ContextSetup contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 3}"), null); + contextSetup.setXContentType(XContentType.JSON); + Request request = new Request(new Script("doc['field'].value >= 3"), "filter", contextSetup); + Response response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(true)); + + contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 3}"), null); + contextSetup.setXContentType(XContentType.JSON); + request = new Request(new Script(ScriptType.INLINE, "painless", "doc['field'].value >= params.max", + singletonMap("max", 3)), "filter", contextSetup); + response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(true)); + + contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 2}"), null); + contextSetup.setXContentType(XContentType.JSON); + request = new Request(new Script(ScriptType.INLINE, "painless", "doc['field'].value >= params.max", + singletonMap("max", 3)), "filter", contextSetup); + response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(false)); + } + + public void testScoreExecutionContext() throws IOException { + ScriptService scriptService = getInstanceFromNode(ScriptService.class); + IndexService indexService = createIndex("index", Settings.EMPTY, "doc", "rank", "type=long", "text", "type=text"); + + Request.ContextSetup contextSetup = new Request.ContextSetup("index", + new BytesArray("{\"rank\": 4.0, \"text\": \"quick brown fox\"}"), new MatchQueryBuilder("text", "fox")); + contextSetup.setXContentType(XContentType.JSON); + Request request = new Request(new Script(ScriptType.INLINE, "painless", + "Math.round((_score + (doc['rank'].value / params.max_rank)) * 100.0) / 100.0", singletonMap("max_rank", 5.0)), "score", + contextSetup); + Response response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(1.09D)); + } + +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java index 488ae0e1643bc..44cd6b5304dc4 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java @@ -18,9 +18,18 @@ */ package org.elasticsearch.painless; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.painless.PainlessExecuteAction.Request.ContextSetup; import org.elasticsearch.script.Script; +import 
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java index 488ae0e1643bc..44cd6b5304dc4 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java @@ -18,9 +18,18 @@ */ package org.elasticsearch.painless; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.painless.PainlessExecuteAction.Request.ContextSetup; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import java.io.IOException; @@ -28,12 +37,22 @@ public class PainlessExecuteRequestTests extends AbstractStreamableXContentTestCase<PainlessExecuteAction.Request> { + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedWriteables()); + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents()); + } + @Override protected PainlessExecuteAction.Request createTestInstance() { Script script = new Script(randomAlphaOfLength(10)); - PainlessExecuteAction.Request.SupportedContext context = randomBoolean() ? - PainlessExecuteAction.Request.SupportedContext.PAINLESS_TEST : null; - return new PainlessExecuteAction.Request(script, context); + ScriptContext<?> context = randomBoolean() ? randomFrom(PainlessExecuteAction.Request.SUPPORTED_CONTEXTS.values()) : null; + ContextSetup contextSetup = randomBoolean() ? randomContextSetup() : null; + return new PainlessExecuteAction.Request(script, context != null ? context.name : null, contextSetup); } @Override @@ -53,9 +72,26 @@ protected boolean supportsUnknownFields() { public void testValidate() { Script script = new Script(ScriptType.STORED, null, randomAlphaOfLength(10), Collections.emptyMap()); - PainlessExecuteAction.Request request = new PainlessExecuteAction.Request(script, null); + PainlessExecuteAction.Request request = new PainlessExecuteAction.Request(script, null, null); Exception e = request.validate(); assertNotNull(e); assertEquals("Validation Failed: 1: only inline scripts are supported;", e.getMessage()); } + + private static ContextSetup randomContextSetup() { + String index = randomBoolean() ? randomAlphaOfLength(4) : null; + QueryBuilder query = randomBoolean() ? new MatchAllQueryBuilder() : null; + // TODO: pass down XContentType to createTestInstance() method. + // otherwise the document itself is different causing test failures. + // This should be done in a separate change as the test instance is created before the xcontent type is randomly picked and + // all the createTestInstance() methods need to be changed, which will make this a big change +// BytesReference doc = randomBoolean() ?
new BytesArray("{}") : null; + BytesReference doc = null; + + ContextSetup contextSetup = new ContextSetup(index, doc, query); +// if (doc != null) { +// contextSetup.setXContentType(XContentType.JSON); +// } + return contextSetup; + } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index 86d365e0fcc7a..cd3e4123e1267 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -24,8 +24,8 @@ import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.FeatureTest; import org.elasticsearch.painless.GenericElasticsearchScript; @@ -405,14 +405,14 @@ public void testPSubBrace() { public void testPSubCallInvoke() { Location l = new Location(getTestName(), 0); PainlessClass c = painlessLookup.getPainlessStructFromJavaClass(Integer.class); - PainlessMethod m = c.methods.get(new PainlessMethodKey("toString", 0)); + PainlessMethod m = c.methods.get(PainlessLookupUtility.buildPainlessMethodKey("toString", 0)); PSubCallInvoke node = new PSubCallInvoke(l, m, null, emptyList()); node.prefix = new EVariable(l, "a"); assertEquals("(PSubCallInvoke (EVariable a) toString)", node.toString()); assertEquals("(PSubNullSafeCallInvoke (PSubCallInvoke (EVariable a) toString))", new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 1); - m = c.methods.get(new PainlessMethodKey("equals", 1)); + m = c.methods.get(PainlessLookupUtility.buildPainlessMethodKey("equals", 1)); node = new PSubCallInvoke(l, m, null, singletonList(new EVariable(l, "b"))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubCallInvoke (EVariable a) equals (Args (EVariable b)))", node.toString()); @@ -502,8 +502,8 @@ public void testPSubMapShortcut() { public void testPSubShortcut() { Location l = new Location(getTestName(), 0); PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(FeatureTest.class); - PainlessMethod getter = s.methods.get(new PainlessMethodKey("getX", 0)); - PainlessMethod setter = s.methods.get(new PainlessMethodKey("setX", 1)); + PainlessMethod getter = s.methods.get(PainlessLookupUtility.buildPainlessMethodKey("getX", 0)); + PainlessMethod setter = s.methods.get(PainlessLookupUtility.buildPainlessMethodKey("setX", 1)); PSubShortcut node = new PSubShortcut(l, "x", FeatureTest.class.getName(), getter, setter); node.prefix = new EVariable(l, "a"); assertEquals("(PSubShortcut (EVariable a) x)", node.toString()); diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml index 7b915cc38dbc0..1e34a776189b8 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml @@ -1,3 +1,18 @@ +setup: + - do: + 
diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml index 7b915cc38dbc0..1e34a776189b8 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml @@ -1,3 +1,18 @@ +setup: + - do: + indices.create: + index: my-index + body: + mappings: + doc: + properties: + rank: + type: long + field: + type: keyword + text: + type: text + --- "Execute with defaults": - do: @@ -11,7 +26,7 @@ - match: { result: "0.1" } --- -"Execute with execute_api_script context": +"Execute with painless_test context": - do: scripts_painless_execute: body: @@ -20,6 +35,37 @@ params: var1: 10 var2: 100 - context: - painless_test: {} + context: "painless_test" - match: { result: "-90" } + +--- +"Execute with filter context": + - do: + scripts_painless_execute: + body: + script: + source: "doc['field'].value.length() <= params.max_length" + params: + max_length: 4 + context: "filter" + context_setup: + document: + field: "four" + index: "my-index" + - match: { result: true } + +--- +"Execute with score context": + - do: + scripts_painless_execute: + body: + script: + source: "doc['rank'].value / params.max_rank" + params: + max_rank: 5.0 + context: "score" + context_setup: + document: + rank: 4 + index: "my-index" + - match: { result: 0.8 } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index f24a9710d292a..5bbf998883eee 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -194,6 +194,10 @@ Query getCandidateMatchesQuery() { return candidateMatchesQuery; } + Query getVerifiedMatchesQuery() { + return verifiedMatchesQuery; + } + // Comparing identity here to avoid being cached // Note that in theory if the same instance gets used multiple times it could still get cached, // however since we create a new query instance each time we use this query this shouldn't happen and thus
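The comment at the end of the hunk above records the design choice: equality is based on object identity so Lucene's query cache never treats two percolate query instances as interchangeable. A minimal illustrative sketch of that pattern, not the class's actual equals contract:

// Hypothetical sketch of identity-based equality: every instance is only equal
// to itself, so a cache keyed on equals/hashCode cannot reuse a cached entry
// for a freshly created query instance.
final class IdentityEqualityExample {
    @Override
    public boolean equals(Object obj) {
        return this == obj; // identity, never structural equality
    }

    @Override
    public int hashCode() {
        return System.identityHashCode(this);
    }
}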
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index c106379519300..e28fbead29abd --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -618,13 +618,13 @@ protected Analyzer getWrappedAnalyzer(String fieldName) { docSearcher.setQueryCache(null); } - PercolatorFieldMapper percolatorFieldMapper = (PercolatorFieldMapper) docMapper.mappers().getMapper(field); - boolean mapUnmappedFieldsAsString = percolatorFieldMapper.isMapUnmappedFieldAsText(); + PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType; + String name = this.name != null ? this.name : pft.name(); QueryShardContext percolateShardContext = wrap(context); + PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, + percolateShardContext, + pft.mapUnmappedFieldsAsText); - String name = this.name != null ? this.name : field; - PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType; - PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, percolateShardContext, mapUnmappedFieldsAsString); return pft.percolateQuery(name, queryStore, documents, docSearcher, context.indexVersionCreated()); } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 103679f5328ef..414d1471e5cd1 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -136,6 +136,8 @@ public PercolatorFieldMapper build(BuilderContext context) { fieldType.rangeField = rangeFieldMapper.fieldType(); NumberFieldMapper minimumShouldMatchFieldMapper = createMinimumShouldMatchField(context); fieldType.minimumShouldMatchField = minimumShouldMatchFieldMapper.fieldType(); + fieldType.mapUnmappedFieldsAsText = getMapUnmappedFieldAsText(context.indexSettings()); + context.path().remove(); setupFieldType(context); return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(), @@ -143,6 +145,10 @@ public PercolatorFieldMapper build(BuilderContext context) { extractionResultField, queryBuilderField, rangeFieldMapper, minimumShouldMatchFieldMapper); } + private static boolean getMapUnmappedFieldAsText(Settings indexSettings) { + return INDEX_MAP_UNMAPPED_FIELDS_AS_TEXT_SETTING.get(indexSettings); + } + static KeywordFieldMapper createExtractQueryFieldBuilder(String name, BuilderContext context) { KeywordFieldMapper.Builder queryMetaDataFieldBuilder = new KeywordFieldMapper.Builder(name); queryMetaDataFieldBuilder.docValues(false); @@ -195,6 +201,7 @@ static class FieldType extends MappedFieldType { MappedFieldType minimumShouldMatchField; RangeFieldMapper.RangeFieldType rangeField; + boolean mapUnmappedFieldsAsText; FieldType() { setIndexOptions(IndexOptions.NONE); @@ -209,6 +216,7 @@ static class FieldType extends MappedFieldType { queryBuilderField = ref.queryBuilderField; rangeField = ref.rangeField; minimumShouldMatchField = ref.minimumShouldMatchField; + mapUnmappedFieldsAsText = ref.mapUnmappedFieldsAsText; } @Override @@ -327,7 +335,6 @@ Tuple<List<BytesRef>, Map<String, List<byte[]>>> extractTermsAndRanges(IndexRead } - private final boolean mapUnmappedFieldAsText; private final Supplier<QueryShardContext> queryShardContext; private KeywordFieldMapper queryTermsField; private KeywordFieldMapper extractionResultField; @@ -348,14 +355,9 @@ Tuple<List<BytesRef>, Map<String, List<byte[]>>> extractTermsAndRanges(IndexRead this.extractionResultField = extractionResultField; this.queryBuilderField = queryBuilderField; this.minimumShouldMatchFieldMapper = minimumShouldMatchFieldMapper; - this.mapUnmappedFieldAsText = getMapUnmappedFieldAsText(indexSettings); this.rangeFieldMapper = rangeFieldMapper; } - private static boolean getMapUnmappedFieldAsText(Settings indexSettings) { - return INDEX_MAP_UNMAPPED_FIELDS_AS_TEXT_SETTING.get(indexSettings); - } - @Override public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) { PercolatorFieldMapper updated = (PercolatorFieldMapper) super.updateFieldType(fullNameToFieldType); @@ -402,7 +404,7 @@ public Mapper parse(ParseContext context) throws IOException { Version indexVersion = context.mapperService().getIndexSettings().getIndexVersionCreated(); createQueryBuilderField(indexVersion, queryBuilderField, queryBuilder, context); - Query 
query = toQuery(queryShardContext, mapUnmappedFieldAsText, queryBuilder); + Query query = toQuery(queryShardContext, isMapUnmappedFieldAsText(), queryBuilder); processQuery(query, context); return null; } @@ -522,7 +524,7 @@ protected String contentType() { } boolean isMapUnmappedFieldAsText() { - return mapUnmappedFieldAsText; + return ((FieldType) fieldType).mapUnmappedFieldsAsText; } /** diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 353111860e21e..9c8979601e8dc 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -196,8 +196,7 @@ public void testDuel() throws Exception { } Collections.sort(intValues); - MappedFieldType intFieldType = mapperService.documentMapper("type").mappers() - .getMapper("int_field").fieldType(); + MappedFieldType intFieldType = mapperService.fullName("int_field"); List> queryFunctions = new ArrayList<>(); queryFunctions.add(MatchNoDocsQuery::new); @@ -329,8 +328,7 @@ public void testDuel2() throws Exception { stringValues.add("value2"); stringValues.add("value3"); - MappedFieldType intFieldType = mapperService.documentMapper("type").mappers() - .getMapper("int_field").fieldType(); + MappedFieldType intFieldType = mapperService.fullName("int_field"); List ranges = new ArrayList<>(); ranges.add(new int[]{-5, 5}); ranges.add(new int[]{0, 10}); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index 3b3ff4ed15c87..e7163edef94c9 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -75,7 +75,8 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase> getPlugins() { @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { queryField = randomAlphaOfLength(4); + aliasField = randomAlphaOfLength(4); + String docType = "_doc"; mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, - queryField, "type=percolator" + queryField, "type=percolator", aliasField, "type=alias,path=" + queryField ))), MapperService.MergeReason.MAPPING_UPDATE); mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, STRING_FIELD_NAME, "type=text" @@ -355,4 +358,21 @@ public void testSerializationFailsUnlessFetched() throws IOException { builder = rewriteAndFetch(builder, createShardContext()); builder.writeTo(new BytesStreamOutput(10)); } + + public void testFieldAlias() throws IOException { + QueryShardContext shardContext = createShardContext(); + + PercolateQueryBuilder builder = doCreateTestQueryBuilder(false); + QueryBuilder rewrittenBuilder = rewriteAndFetch(builder, shardContext); + PercolateQuery query = (PercolateQuery) rewrittenBuilder.toQuery(shardContext); + + PercolateQueryBuilder aliasBuilder = new PercolateQueryBuilder(aliasField, + builder.getDocuments(), + builder.getXContentType()); + QueryBuilder rewrittenAliasBuilder = rewriteAndFetch(aliasBuilder, shardContext); + PercolateQuery aliasQuery = 
(PercolateQuery) rewrittenAliasBuilder.toQuery(shardContext); + + assertEquals(query.getCandidateMatchesQuery(), aliasQuery.getCandidateMatchesQuery()); + assertEquals(query.getVerifiedMatchesQuery(), aliasQuery.getVerifiedMatchesQuery()); + } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 597fbcf663202..80524a2f862fe 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -230,10 +230,10 @@ public void testExtractTerms() throws Exception { public void testExtractRanges() throws Exception { addQueryFieldMappings(); BooleanQuery.Builder bq = new BooleanQuery.Builder(); - Query rangeQuery1 = mapperService.documentMapper("doc").mappers().getMapper("number_field1").fieldType() + Query rangeQuery1 = mapperService.fullName("number_field1") .rangeQuery(10, 20, true, true, null, null, null, null); bq.add(rangeQuery1, Occur.MUST); - Query rangeQuery2 = mapperService.documentMapper("doc").mappers().getMapper("number_field1").fieldType() + Query rangeQuery2 = mapperService.fullName("number_field1") .rangeQuery(15, 20, true, true, null, null, null, null); bq.add(rangeQuery2, Occur.MUST); @@ -265,7 +265,7 @@ public void testExtractRanges() throws Exception { // Range queries on different fields: bq = new BooleanQuery.Builder(); bq.add(rangeQuery1, Occur.MUST); - rangeQuery2 = mapperService.documentMapper("doc").mappers().getMapper("number_field2").fieldType() + rangeQuery2 = mapperService.fullName("number_field2") .rangeQuery(15, 20, true, true, null, null, null, null); bq.add(rangeQuery2, Occur.MUST); diff --git a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java index 0766560a849a6..93c785e754a54 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java @@ -96,7 +96,7 @@ public void updateByQuery() { updateByQuery.source("source_index") .script(new Script( ScriptType.INLINE, - "if (ctx._source.awesome == 'absolutely) {" + "if (ctx._source.awesome == 'absolutely') {" + " ctx.op='noop'" + "} else if (ctx._source.awesome == 'lame') {" + " ctx.op='delete'" diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java index bea717835917b..0b84f538dcd79 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java @@ -46,7 +46,7 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory { private static final String RULE_FILES = "rule_files"; public IcuTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); config = getIcuConfig(environment, settings); } diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java 
b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java index 2f00e68a75ebc..e9268f7306512 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java @@ -45,7 +45,7 @@ public class KuromojiTokenizerFactory extends AbstractTokenizerFactory { private boolean discartPunctuation; public KuromojiTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); mode = getMode(settings); userDictionary = getUserDictionary(env, settings); discartPunctuation = settings.getAsBoolean("discard_punctuation", true); diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java index 346cc84e5e6b4..9295ed95c3fb8 100644 --- a/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java +++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java @@ -38,7 +38,7 @@ public class NoriTokenizerFactory extends AbstractTokenizerFactory { private final KoreanTokenizer.DecompoundMode decompoundMode; public NoriTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); decompoundMode = getMode(settings); userDictionary = getUserDictionary(env, settings); } diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java index 9d38729615205..560bce9db2701 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java @@ -28,7 +28,7 @@ public class SmartChineseTokenizerTokenizerFactory extends AbstractTokenizerFactory { public SmartChineseTokenizerTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java index 1b8ca38aec442..c5a5f1df98ee2 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java @@ -40,6 +40,8 @@ import org.elasticsearch.transport.TcpTransport; import org.junit.AfterClass; import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.ExternalResource; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; @@ -87,6 +89,14 @@ protected Collection> nodePlugins() { private static Path keyStoreFile; + @ClassRule + public static final ExternalResource 
MUTE_IN_FIPS_JVM = new ExternalResource() { + @Override + protected void before() { + assumeFalse("Can't run in a FIPS JVM because none of the supported Keystore types can be used", inFipsJvm()); + } + }; + @BeforeClass public static void setupKeyStore() throws IOException { Path tempDir = createTempDir(); diff --git a/plugins/discovery-ec2/qa/build.gradle b/plugins/discovery-ec2/qa/build.gradle index e69de29bb2d1d..0aed6df883825 100644 --- a/plugins/discovery-ec2/qa/build.gradle +++ b/plugins/discovery-ec2/qa/build.gradle @@ -0,0 +1 @@ +group = "${group}.plugins.discovery-ec2.qa" diff --git a/plugins/repository-azure/qa/build.gradle b/plugins/repository-azure/qa/build.gradle index e69de29bb2d1d..5c25485a8f596 100644 --- a/plugins/repository-azure/qa/build.gradle +++ b/plugins/repository-azure/qa/build.gradle @@ -0,0 +1 @@ +group = "${group}.plugins.repository-azure.qa" diff --git a/plugins/repository-gcs/qa/build.gradle b/plugins/repository-gcs/qa/build.gradle index e69de29bb2d1d..d10d9050dfecf 100644 --- a/plugins/repository-gcs/qa/build.gradle +++ b/plugins/repository-gcs/qa/build.gradle @@ -0,0 +1 @@ +group = "${group}.plugins.repository-gcs.qa" diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 5af0a412b4cc0..181891e20564d 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -89,18 +89,34 @@ String s3TemporarySessionToken = System.getenv("amazon_s3_session_token_temporar String s3TemporaryBucket = System.getenv("amazon_s3_bucket_temporary") String s3TemporaryBasePath = System.getenv("amazon_s3_base_path_temporary") +String s3EC2Bucket = System.getenv("amazon_s3_bucket_ec2") +String s3EC2BasePath = System.getenv("amazon_s3_base_path_ec2") + +String s3ECSBucket = System.getenv("amazon_s3_bucket_ecs") +String s3ECSBasePath = System.getenv("amazon_s3_base_path_ecs") + // If all these variables are missing then we are testing against the internal fixture instead, which has the following // credentials hard-coded in. 
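The hunk below widens this all-or-nothing check to the new EC2 and ECS variables: either every amazon_s3_* variable is set (run against a real external S3 service) or none is (fall back to the fixture). A hedged Groovy sketch of the pattern, with illustrative variable names; the real check enumerates each amazon_s3_* variable explicitly:

// Illustrative only: use the fixture when nothing is configured,
// fail fast on a partial external configuration.
def external = [
    System.getenv("example_access_key"),
    System.getenv("example_secret_key"),
    System.getenv("example_bucket"),
]
if (external.every { it == null }) {
    // no external service configured: fall back to the fixture's hard-coded credentials
    useFixture = true
} else if (external.any { it == null }) {
    throw new IllegalArgumentException("not all options specified to run against external S3 service")
}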
-if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath) { +if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath + && !s3EC2Bucket && !s3EC2BasePath + && !s3ECSBucket && !s3ECSBasePath) { s3PermanentAccessKey = 's3_integration_test_permanent_access_key' s3PermanentSecretKey = 's3_integration_test_permanent_secret_key' s3PermanentBucket = 'permanent-bucket-test' s3PermanentBasePath = 'integration_test' + s3EC2Bucket = 'ec2-bucket-test' + s3EC2BasePath = 'integration_test' + + s3ECSBucket = 'ecs-bucket-test' + s3ECSBasePath = 'integration_test' + useFixture = true -} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath) { +} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath + || !s3EC2Bucket || !s3EC2BasePath + || !s3ECSBucket || !s3ECSBasePath) { throw new IllegalArgumentException("not all options specified to run against external S3 service") } @@ -274,24 +290,56 @@ if (useFixture && minioDistribution) { integTestMinioRunner.dependsOn(startMinio) integTestMinioRunner.finalizedBy(stopMinio) // Minio only supports a single access key, see https://github.com/minio/minio/pull/5968 - integTestMinioRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/30_repository_temporary_credentials/*' + integTestMinioRunner.systemProperty 'tests.rest.blacklist', [ + 'repository_s3/30_repository_temporary_credentials/*', + 'repository_s3/40_repository_ec2_credentials/*', + 'repository_s3/50_repository_ecs_credentials/*' + ].join(",") project.check.dependsOn(integTestMinio) } +File parentFixtures = new File(project.buildDir, "fixtures") +File s3FixtureFile = new File(parentFixtures, 's3Fixture.properties') + +task s3FixtureProperties { + outputs.file(s3FixtureFile) + def s3FixtureOptions = [ + "tests.seed" : project.testSeed, + "s3Fixture.permanent_bucket_name" : s3PermanentBucket, + "s3Fixture.permanent_key" : s3PermanentAccessKey, + "s3Fixture.temporary_bucket_name" : s3TemporaryBucket, + "s3Fixture.temporary_key" : s3TemporaryAccessKey, + "s3Fixture.temporary_session_token": s3TemporarySessionToken, + "s3Fixture.ec2_bucket_name" : s3EC2Bucket, + "s3Fixture.ecs_bucket_name" : s3ECSBucket + ] + + doLast { + file(s3FixtureFile).text = s3FixtureOptions.collect { k, v -> "$k = $v" }.join("\n") + } +} + /** A task to start the AmazonS3Fixture which emulates an S3 service **/ task s3Fixture(type: AntFixture) { dependsOn testClasses + dependsOn s3FixtureProperties + inputs.file(s3FixtureFile) + env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" executable = new File(project.runtimeJavaHome, 'bin/java') - args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3PermanentBucket, s3TemporaryBucket + args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3FixtureFile.getAbsolutePath() } Map expansions = [ 'permanent_bucket': s3PermanentBucket, 'permanent_base_path': s3PermanentBasePath, 'temporary_bucket': s3TemporaryBucket, - 'temporary_base_path': s3TemporaryBasePath + 'temporary_base_path': s3TemporaryBasePath, + 'ec2_bucket': s3EC2Bucket, + 'ec2_base_path': s3EC2BasePath, + 'ecs_bucket': s3ECSBucket, + 'ecs_base_path': s3ECSBasePath ] processTestResources { @@ -319,11 +367,43 @@ integTestCluster { /* Use a closure on the string to delay evaluation until tests are executed */ setting 's3.client.integration_test_permanent.endpoint', "http://${-> s3Fixture.addressAndPort}" 
setting 's3.client.integration_test_temporary.endpoint', "http://${-> s3Fixture.addressAndPort}" + setting 's3.client.integration_test_ec2.endpoint', "http://${-> s3Fixture.addressAndPort}" + + // to redirect InstanceProfileCredentialsProvider to custom auth point + systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", "http://${-> s3Fixture.addressAndPort}" } else { println "Using an external service to test the repository-s3 plugin" } } +integTestRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/50_repository_ecs_credentials/*' + +/// +RestIntegTestTask integTestECS = project.tasks.create('integTestECS', RestIntegTestTask.class) { + description = "Runs tests using the ECS repository." +} + +// The following closure must execute before the afterEvaluate block in the constructor of the following integrationTest tasks: +project.afterEvaluate { + ClusterConfiguration cluster = project.extensions.getByName('integTestECSCluster') as ClusterConfiguration + cluster.dependsOn(project.s3Fixture) + + cluster.setting 's3.client.integration_test_ecs.endpoint', "http://${-> s3Fixture.addressAndPort}" + + Task integTestECSTask = project.tasks.getByName('integTestECS') + integTestECSTask.clusterConfig.plugin(project.path) + integTestECSTask.clusterConfig.environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', + "http://${-> s3Fixture.addressAndPort}/ecs_credentials_endpoint" + integTestECSRunner.systemProperty 'tests.rest.blacklist', [ + 'repository_s3/10_basic/*', + 'repository_s3/20_repository_permanent_credentials/*', + 'repository_s3/30_repository_temporary_credentials/*', + 'repository_s3/40_repository_ec2_credentials/*' + ].join(",") +} +project.check.dependsOn(integTestECS) +/// + thirdPartyAudit.excludes = [ // classes are missing 'javax.servlet.ServletContextEvent', diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index 91a7a30024b78..b177686bd71a6 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -22,7 +22,7 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.InstanceProfileCredentialsProvider; +import com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper; import com.amazonaws.http.IdleConnectionReaper; import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.services.s3.AmazonS3; @@ -156,10 +156,11 @@ protected synchronized void releaseCachedClients() { } static class PrivilegedInstanceProfileCredentialsProvider implements AWSCredentialsProvider { - private final InstanceProfileCredentialsProvider credentials; + private final AWSCredentialsProvider credentials; private PrivilegedInstanceProfileCredentialsProvider() { - this.credentials = new InstanceProfileCredentialsProvider(); + // InstanceProfileCredentialsProvider as last item of chain + this.credentials = new EC2ContainerCredentialsProviderWrapper(); } @Override diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java index 9b38669da2563..a411a1c53cf36 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java +++ 
b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java @@ -18,6 +18,14 @@ */ package org.elasticsearch.repositories.s3; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpHead; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.common.TriFunction; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.test.fixture.AbstractHttpFixture; import com.amazonaws.util.DateUtils; import org.elasticsearch.common.Strings; @@ -26,20 +34,26 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestUtils; -import org.elasticsearch.test.fixture.AbstractHttpFixture; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; +import java.util.Properties; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLength; +import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween; import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.Objects.requireNonNull; /** * {@link AmazonS3Fixture} emulates an AWS S3 service * The implementation is based on official documentation available at https://docs.aws.amazon.com/AmazonS3/latest/API/. 
*/ public class AmazonS3Fixture extends AbstractHttpFixture { + private static final String AUTH = "AUTH"; + private static final String NON_AUTH = "NON_AUTH"; + + private static final String EC2_PROFILE = "ec2Profile"; + + private final Properties properties; + private final Random random; /** List of the buckets stored on this test server **/ private final Map buckets = ConcurrentCollections.newConcurrentMap(); /** Request handlers for the requests made by the S3 client **/ private final PathTrie handlers; - private final String permanentBucketName; - private final String temporaryBucketName; /** * Creates a {@link AmazonS3Fixture} */ - private AmazonS3Fixture(final String workingDir, final String permanentBucketName, final String temporaryBucketName) { + private AmazonS3Fixture(final String workingDir, Properties properties) { super(workingDir); - this.permanentBucketName = permanentBucketName; - this.temporaryBucketName = temporaryBucketName; + this.properties = properties; + this.random = new Random(Long.parseUnsignedLong(requireNonNull(properties.getProperty("tests.seed")), 16)); + + new Bucket("s3Fixture.permanent", false); + new Bucket("s3Fixture.temporary", true); + final Bucket ec2Bucket = new Bucket("s3Fixture.ec2", + randomAsciiAlphanumOfLength(random, 10), randomAsciiAlphanumOfLength(random, 10)); - this.buckets.put(permanentBucketName, new Bucket(permanentBucketName)); - this.buckets.put(temporaryBucketName, new Bucket(temporaryBucketName)); - this.handlers = defaultHandlers(buckets); + final Bucket ecsBucket = new Bucket("s3Fixture.ecs", + randomAsciiAlphanumOfLength(random, 10), randomAsciiAlphanumOfLength(random, 10)); + + this.handlers = defaultHandlers(buckets, ec2Bucket, ecsBucket); + } + + private static String nonAuthPath(Request request) { + return nonAuthPath(request.getMethod(), request.getPath()); + } + + private static String nonAuthPath(String method, String path) { + return NON_AUTH + " " + method + " " + path; + } + + private static String authPath(Request request) { + return authPath(request.getMethod(), request.getPath()); + } + + private static String authPath(String method, String path) { + return AUTH + " " + method + " " + path; } @Override protected Response handle(final Request request) throws IOException { - final RequestHandler handler = handlers.retrieve(request.getMethod() + " " + request.getPath(), request.getParameters()); + final String nonAuthorizedPath = nonAuthPath(request); + final RequestHandler nonAuthorizedHandler = handlers.retrieve(nonAuthorizedPath, request.getParameters()); + if (nonAuthorizedHandler != null) { + return nonAuthorizedHandler.handle(request); + } + + final String authorizedPath = authPath(request); + final RequestHandler handler = handlers.retrieve(authorizedPath, request.getParameters()); if (handler != null) { - final String authorization = request.getHeader("Authorization"); - final String permittedBucket; - if (authorization.contains("s3_integration_test_permanent_access_key")) { - final String sessionToken = request.getHeader("x-amz-security-token"); + final String bucketName = request.getParam("bucket"); + if (bucketName == null) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad access key", ""); + } + final Bucket bucket = buckets.get(bucketName); + if (bucket == null) { + return newBucketNotFoundError(request.getId(), bucketName); + } + final Response authResponse = authenticateBucket(request, bucket); + if (authResponse != null) { + return authResponse; + } + + return 
handler.handle(request); + + } else { + return newInternalError(request.getId(), "No handler defined for request [" + request + "]"); + } + } + + private Response authenticateBucket(Request request, Bucket bucket) { + final String authorization = request.getHeader("Authorization"); + if (authorization == null) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad access key", ""); + } + if (authorization.contains(bucket.key)) { + final String sessionToken = request.getHeader("x-amz-security-token"); + if (bucket.token == null) { if (sessionToken != null) { return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Unexpected session token", ""); } - permittedBucket = permanentBucketName; - } else if (authorization.contains("s3_integration_test_temporary_access_key")) { - final String sessionToken = request.getHeader("x-amz-security-token"); + } else { if (sessionToken == null) { return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "No session token", ""); } - if (sessionToken.equals("s3_integration_test_temporary_session_token") == false) { + if (sessionToken.equals(bucket.token) == false) { return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad session token", ""); } - permittedBucket = temporaryBucketName; - } else { - return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad access key", ""); - } - - final String bucket = request.getParam("bucket"); - if (bucket != null && permittedBucket.equals(bucket) == false) { - // allow a null bucket to support the multi-object-delete API which - // passes the bucket name in the host header instead of the URL. - if (buckets.containsKey(bucket)) { - return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad bucket", ""); - } else { - return newBucketNotFoundError(request.getId(), bucket); - } } - return handler.handle(request); - - } else { - return newInternalError(request.getId(), "No handler defined for request [" + request + "]"); } + return null; } public static void main(final String[] args) throws Exception { - if (args == null || args.length != 3) { - throw new IllegalArgumentException( - "AmazonS3Fixture "); + if (args == null || args.length != 2) { + throw new IllegalArgumentException("AmazonS3Fixture "); } - - final AmazonS3Fixture fixture = new AmazonS3Fixture(args[0], args[1], args[2]); + final Properties properties = new Properties(); + try (InputStream is = Files.newInputStream(PathUtils.get(args[1]))) { + properties.load(is); + } + final AmazonS3Fixture fixture = new AmazonS3Fixture(args[0], properties); fixture.listen(); } /** Builds the default request handlers **/ - private static PathTrie defaultHandlers(final Map buckets) { + private PathTrie defaultHandlers(final Map buckets, final Bucket ec2Bucket, final Bucket ecsBucket) { final PathTrie handlers = new PathTrie<>(RestUtils.REST_DECODER); // HEAD Object // // https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectHEAD.html - objectsPaths("HEAD /{bucket}").forEach(path -> + objectsPaths(authPath(HttpHead.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String bucketName = request.getParam("bucket"); @@ -150,7 +205,7 @@ private static PathTrie defaultHandlers(final Map + objectsPaths(authPath(HttpPut.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String destBucketName = request.getParam("bucket"); @@ -200,7 +255,7 @@ private static PathTrie defaultHandlers(final Map + 
objectsPaths(authPath(HttpDelete.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String bucketName = request.getParam("bucket"); @@ -218,7 +273,7 @@ private static PathTrie defaultHandlers(final Map + objectsPaths(authPath(HttpGet.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String bucketName = request.getParam("bucket"); @@ -239,7 +294,7 @@ private static PathTrie defaultHandlers(final Map { + handlers.insert(authPath(HttpHead.METHOD_NAME, "/{bucket}"), (request) -> { String bucket = request.getParam("bucket"); if (Strings.hasText(bucket) && buckets.containsKey(bucket)) { return new Response(RestStatus.OK.getStatus(), TEXT_PLAIN_CONTENT_TYPE, EMPTY_BYTE); @@ -251,7 +306,7 @@ private static PathTrie defaultHandlers(final Map { + handlers.insert(authPath(HttpGet.METHOD_NAME, "/{bucket}/"), (request) -> { final String bucketName = request.getParam("bucket"); final Bucket bucket = buckets.get(bucketName); @@ -269,7 +324,7 @@ private static PathTrie defaultHandlers(final Map { + handlers.insert(nonAuthPath(HttpPost.METHOD_NAME, "/"), (request) -> { final List deletes = new ArrayList<>(); final List errors = new ArrayList<>(); @@ -292,7 +347,12 @@ private static PathTrie defaultHandlers(final Map defaultHandlers(final Map credentialResponseFunction = (profileName, key, token) -> { + final Date expiration = new Date(new Date().getTime() + TimeUnit.DAYS.toMillis(1)); + final String response = "{" + + "\"AccessKeyId\": \"" + key + "\"," + + "\"Expiration\": \"" + DateUtils.formatISO8601Date(expiration) + "\"," + + "\"RoleArn\": \"" + randomAsciiAlphanumOfLengthBetween(random, 1, 20) + "\"," + + "\"SecretAccessKey\": \"" + randomAsciiAlphanumOfLengthBetween(random, 1, 20) + "\"," + + "\"Token\": \"" + token + "\"" + + "}"; + + final Map headers = new HashMap<>(contentType("application/json")); + return new Response(RestStatus.OK.getStatus(), headers, response.getBytes(UTF_8)); + }; + + // GET + // + // http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html + handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/latest/meta-data/iam/security-credentials/"), (request) -> { + final String response = EC2_PROFILE; + + final Map headers = new HashMap<>(contentType("text/plain")); + return new Response(RestStatus.OK.getStatus(), headers, response.getBytes(UTF_8)); + }); + + // GET + // + // http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html + handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/latest/meta-data/iam/security-credentials/{profileName}"), (request) -> { + final String profileName = request.getParam("profileName"); + if (EC2_PROFILE.equals(profileName) == false) { + return new Response(RestStatus.NOT_FOUND.getStatus(), new HashMap<>(), "unknown profile".getBytes(UTF_8)); + } + return credentialResponseFunction.apply(profileName, ec2Bucket.key, ec2Bucket.token); + }); + + // GET + // + // https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html + handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/ecs_credentials_endpoint"), + (request) -> credentialResponseFunction.apply("CPV_ECS", ecsBucket.key, ecsBucket.token)); + + return handlers; } + private static String prop(Properties properties, String propertyName) { + return requireNonNull(properties.getProperty(propertyName), + "property '" + propertyName + "' is missing"); + } + /** * Represents a S3 bucket. 
*/ - static class Bucket { + class Bucket { /** Bucket name **/ final String name; + final String key; + + final String token; + /** Blobs contained in the bucket **/ final Map objects; - Bucket(final String name) { - this.name = Objects.requireNonNull(name); + private Bucket(final String prefix, final boolean tokenRequired) { + this(prefix, prop(properties, prefix + "_key"), + tokenRequired ? prop(properties, prefix + "_session_token") : null); + } + + private Bucket(final String prefix, final String key, final String token) { + this.name = prop(properties, prefix + "_bucket_name"); + this.key = key; + this.token = token; + this.objects = ConcurrentCollections.newConcurrentMap(); + if (buckets.put(name, this) != null) { + throw new IllegalArgumentException("bucket " + name + " is already registered"); + } } } diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml new file mode 100644 index 0000000000000..2df3b8290a19b --- /dev/null +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml @@ -0,0 +1,243 @@ +# Integration tests for repository-s3 + +--- +setup: + + # Register repository with ec2 credentials + - do: + snapshot.create_repository: + repository: repository_ec2 + body: + type: s3 + settings: + bucket: ${ec2_bucket} + client: integration_test_ec2 + base_path: ${ec2_base_path} + canned_acl: private + storage_class: standard + +--- +"Snapshot and Restore with repository-s3 using ec2 credentials": + + # Get repository + - do: + snapshot.get_repository: + repository: repository_ec2 + + - match: { repository_ec2.settings.bucket : ${ec2_bucket} } + - match: { repository_ec2.settings.client : "integration_test_ec2" } + - match: { repository_ec2.settings.base_path : ${ec2_base_path} } + - match: { repository_ec2.settings.canned_acl : "private" } + - match: { repository_ec2.settings.storage_class : "standard" } + - is_false: repository_ec2.settings.access_key + - is_false: repository_ec2.settings.secret_key + - is_false: repository_ec2.settings.session_token + + # Index documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 1 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 2 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 3 + - snapshot: one + + - do: + count: + index: docs + + - match: {count: 3} + + # Create a first snapshot + - do: + snapshot.create: + repository: repository_ec2 + snapshot: snapshot-one + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-one } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.include_global_state: true } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.status: + repository: repository_ec2 + snapshot: snapshot-one + + - is_true: snapshots + - match: { snapshots.0.snapshot: snapshot-one } + - match: { snapshots.0.state : SUCCESS } + + # Index more documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 4 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 5 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 6 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 7 + - snapshot: two + + - do: + count: + index: docs + + - match: {count: 7} + + # Create a second snapshot + - do: + snapshot.create: + repository: repository_ec2 + snapshot: 
snapshot-two + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-two } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.get: + repository: repository_ec2 + snapshot: snapshot-one,snapshot-two + + - is_true: snapshots + - match: { snapshots.0.state : SUCCESS } + - match: { snapshots.1.state : SUCCESS } + + # Delete the index + - do: + indices.delete: + index: docs + + # Restore the second snapshot + - do: + snapshot.restore: + repository: repository_ec2 + snapshot: snapshot-two + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 7} + + # Delete the index again + - do: + indices.delete: + index: docs + + # Restore the first snapshot + - do: + snapshot.restore: + repository: repository_ec2 + snapshot: snapshot-one + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 3} + + # Remove the snapshots + - do: + snapshot.delete: + repository: repository_ec2 + snapshot: snapshot-two + + - do: + snapshot.delete: + repository: repository_ec2 + snapshot: snapshot-one + +--- +"Register a repository with a non existing bucket": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_ec2 + body: + type: s3 + settings: + bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE + client: integration_test_temporary + +--- +"Register a repository with a non existing client": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_ec2 + body: + type: s3 + settings: + bucket: repository_ec2 + client: unknown + +--- +"Get a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.get: + repository: repository_ec2 + snapshot: missing + +--- +"Delete a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.delete: + repository: repository_ec2 + snapshot: missing + +--- +"Restore a non existing snapshot": + + - do: + catch: /snapshot_restore_exception/ + snapshot.restore: + repository: repository_ec2 + snapshot: missing + wait_for_completion: true + +--- +teardown: + + # Remove our repository + - do: + snapshot.delete_repository: + repository: repository_ec2 diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml new file mode 100644 index 0000000000000..54929e6e3ad82 --- /dev/null +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml @@ -0,0 +1,243 @@ +# Integration tests for repository-s3 + +--- +setup: + + # Register repository with ecs credentials + - do: + snapshot.create_repository: + repository: repository_ecs + body: + type: s3 + settings: + bucket: ${ecs_bucket} + client: integration_test_ecs + base_path: ${ecs_base_path} + canned_acl: private + storage_class: standard + +--- +"Snapshot and Restore with repository-s3 using ecs credentials": + + # Get repository + - do: + snapshot.get_repository: + repository: repository_ecs + + - match: { repository_ecs.settings.bucket : ${ecs_bucket} } + - match: { repository_ecs.settings.client : "integration_test_ecs" } + - match: { repository_ecs.settings.base_path : ${ecs_base_path} } + - match: { repository_ecs.settings.canned_acl : "private" } + - match: { repository_ecs.settings.storage_class : "standard" } + - is_false: repository_ecs.settings.access_key + - is_false: 
repository_ecs.settings.secret_key + - is_false: repository_ecs.settings.session_token + + # Index documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 1 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 2 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 3 + - snapshot: one + + - do: + count: + index: docs + + - match: {count: 3} + + # Create a first snapshot + - do: + snapshot.create: + repository: repository_ecs + snapshot: snapshot-one + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-one } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.include_global_state: true } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.status: + repository: repository_ecs + snapshot: snapshot-one + + - is_true: snapshots + - match: { snapshots.0.snapshot: snapshot-one } + - match: { snapshots.0.state : SUCCESS } + + # Index more documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 4 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 5 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 6 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 7 + - snapshot: two + + - do: + count: + index: docs + + - match: {count: 7} + + # Create a second snapshot + - do: + snapshot.create: + repository: repository_ecs + snapshot: snapshot-two + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-two } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.get: + repository: repository_ecs + snapshot: snapshot-one,snapshot-two + + - is_true: snapshots + - match: { snapshots.0.state : SUCCESS } + - match: { snapshots.1.state : SUCCESS } + + # Delete the index + - do: + indices.delete: + index: docs + + # Restore the second snapshot + - do: + snapshot.restore: + repository: repository_ecs + snapshot: snapshot-two + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 7} + + # Delete the index again + - do: + indices.delete: + index: docs + + # Restore the first snapshot + - do: + snapshot.restore: + repository: repository_ecs + snapshot: snapshot-one + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 3} + + # Remove the snapshots + - do: + snapshot.delete: + repository: repository_ecs + snapshot: snapshot-two + + - do: + snapshot.delete: + repository: repository_ecs + snapshot: snapshot-one + +--- +"Register a repository with a non existing bucket": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_ecs + body: + type: s3 + settings: + bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE + client: integration_test_temporary + +--- +"Register a repository with a non existing client": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_ecs + body: + type: s3 + settings: + bucket: repository_ecs + client: unknown + +--- +"Get a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.get: + repository: repository_ecs + snapshot: missing + +--- +"Delete a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.delete: + repository: repository_ecs + snapshot: missing + +--- +"Restore a non existing snapshot": + + - do: + catch: /snapshot_restore_exception/ + snapshot.restore: + repository: repository_ecs + snapshot: missing + wait_for_completion: true + +--- +teardown: 
+ + # Remove our repository + - do: + snapshot.delete_repository: + repository: repository_ecs diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 29aec900cefa9..6bfa4de8d4adf 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -171,7 +171,7 @@ public void testSearchSkipUnavailable() throws IOException { assertEquals(10, response.getHits().totalHits); assertEquals(10, response.getHits().getHits().length); String scrollId = response.getScrollId(); - SearchResponse scrollResponse = restHighLevelClient.searchScroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); + SearchResponse scrollResponse = restHighLevelClient.scroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); assertSame(SearchResponse.Clusters.EMPTY, scrollResponse.getClusters()); assertEquals(10, scrollResponse.getHits().totalHits); assertEquals(0, scrollResponse.getHits().getHits().length); @@ -206,7 +206,7 @@ public void testSearchSkipUnavailable() throws IOException { assertEquals(10, response.getHits().totalHits); assertEquals(10, response.getHits().getHits().length); String scrollId = response.getScrollId(); - SearchResponse scrollResponse = restHighLevelClient.searchScroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); + SearchResponse scrollResponse = restHighLevelClient.scroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); assertSame(SearchResponse.Clusters.EMPTY, scrollResponse.getClusters()); assertEquals(10, scrollResponse.getHits().totalHits); assertEquals(0, scrollResponse.getHits().getHits().length); diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 704136eb4cf27..4a0c91469629d 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -30,6 +30,7 @@ dependencies { compile "junit:junit:${versions.junit}" compile "org.hamcrest:hamcrest-core:${versions.hamcrest}" compile "org.hamcrest:hamcrest-library:${versions.hamcrest}" + compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" @@ -81,7 +82,7 @@ tasks.dependencyLicenses.enabled = false tasks.dependenciesInfo.enabled = false tasks.thirdPartyAudit.excludes = [ - //commons-logging optional dependencies + // commons-logging optional dependencies 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', 'org.apache.log.Logger', @@ -89,7 +90,7 @@ tasks.thirdPartyAudit.excludes = [ 'org.apache.log4j.Level', 'org.apache.log4j.Logger', 'org.apache.log4j.Priority', - //commons-logging provided dependencies + // commons-logging provided dependencies 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener' ] diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java index fa7f8e8ef78c5..57b647e1207c2 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java @@ -19,10 +19,19 @@ package 
org.elasticsearch.packaging; +import org.elasticsearch.packaging.test.DefaultDebPreservationTests; +import org.elasticsearch.packaging.test.DefaultDebBasicTests; +import org.elasticsearch.packaging.test.DefaultRpmPreservationTests; +import org.elasticsearch.packaging.test.DefaultRpmBasicTests; +import org.elasticsearch.packaging.test.OssDebPreservationTests; +import org.elasticsearch.packaging.test.OssDebBasicTests; +import org.elasticsearch.packaging.test.OssRpmPreservationTests; +import org.elasticsearch.packaging.test.OssRpmBasicTests; import org.elasticsearch.packaging.test.OssTarTests; import org.elasticsearch.packaging.test.OssZipTests; import org.elasticsearch.packaging.test.DefaultTarTests; import org.elasticsearch.packaging.test.DefaultZipTests; +import org.elasticsearch.packaging.test.PackageDependenciesTests; import org.junit.runner.RunWith; import org.junit.runners.Suite; @@ -31,8 +40,17 @@ @RunWith(Suite.class) @SuiteClasses({ DefaultTarTests.class, - DefaultZipTests.class, OssTarTests.class, - OssZipTests.class + DefaultZipTests.class, + OssZipTests.class, + PackageDependenciesTests.class, + DefaultRpmBasicTests.class, + OssRpmBasicTests.class, + DefaultDebBasicTests.class, + OssDebBasicTests.class, + DefaultDebPreservationTests.class, + OssDebPreservationTests.class, + DefaultRpmPreservationTests.class, + OssRpmPreservationTests.class }) public class PackagingTests {} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java index df5e8cf995d86..3aada7837d8ae 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java @@ -19,6 +19,7 @@ package org.elasticsearch.packaging.test; +import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; import org.apache.http.client.fluent.Request; import org.elasticsearch.packaging.util.Archives; import org.elasticsearch.packaging.util.Platforms; @@ -27,9 +28,6 @@ import org.elasticsearch.packaging.util.Shell.Result; import org.junit.Before; import org.junit.BeforeClass; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; import org.elasticsearch.packaging.util.Distribution; import org.elasticsearch.packaging.util.Installation; @@ -67,8 +65,8 @@ * Tests that apply to the archive distributions (tar, zip). To add a case for a distribution, subclass and * override {@link ArchiveTestCase#distribution()}. 
These tests should be the same across all archive distributions */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public abstract class ArchiveTestCase { +@TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class) +public abstract class ArchiveTestCase extends PackagingTestCase { private static Installation installation; @@ -86,13 +84,11 @@ public void onlyCompatibleDistributions() { assumeTrue("only compatible distributions", distribution().packaging.compatible); } - @Test public void test10Install() { installation = installArchive(distribution()); verifyArchiveInstallation(installation, distribution()); } - @Test public void test20PluginsListWithNoPlugins() { assumeThat(installation, is(notNullValue())); @@ -103,7 +99,6 @@ public void test20PluginsListWithNoPlugins() { assertThat(r.stdout, isEmptyString()); } - @Test public void test30AbortWhenJavaMissing() { assumeThat(installation, is(notNullValue())); @@ -146,7 +141,6 @@ public void test30AbortWhenJavaMissing() { }); } - @Test public void test40CreateKeystoreManually() { assumeThat(installation, is(notNullValue())); @@ -180,7 +174,6 @@ public void test40CreateKeystoreManually() { }); } - @Test public void test50StartAndStop() throws IOException { assumeThat(installation, is(notNullValue())); @@ -198,7 +191,6 @@ public void test50StartAndStop() throws IOException { Archives.stopElasticsearch(installation); } - @Test public void test60AutoCreateKeystore() { assumeThat(installation, is(notNullValue())); @@ -218,7 +210,6 @@ public void test60AutoCreateKeystore() { }); } - @Test public void test70CustomPathConfAndJvmOptions() throws IOException { assumeThat(installation, is(notNullValue())); @@ -268,7 +259,6 @@ public void test70CustomPathConfAndJvmOptions() throws IOException { } } - @Test public void test80RelativePathConf() throws IOException { assumeThat(installation, is(notNullValue())); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java new file mode 100644 index 0000000000000..c584f5d2e44fa --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
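
The ArchiveTestCase hunks above replace JUnit's @FixMethodOrder(MethodSorters.NAME_ASCENDING) with randomizedtesting's @TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class), and the @Test annotations can be dropped because the new PackagingTestCase base class (further down in this diff) registers JUnit3MethodProvider, which discovers any public void no-arg method whose name starts with "test". A minimal sketch of how those two pieces combine (the class below is hypothetical, not part of the PR):

    import com.carrotsearch.randomizedtesting.JUnit3MethodProvider;
    import com.carrotsearch.randomizedtesting.RandomizedRunner;
    import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering;
    import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
    import org.junit.runner.RunWith;

    @RunWith(RandomizedRunner.class)
    @TestMethodProviders({ JUnit3MethodProvider.class })
    @TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class)
    public class OrderingExampleTests {
        // discovered without @Test because of the JUnit3-style names, and the
        // numeric prefixes give a stable alphabetic (hence numeric) run order
        public void test10First() { /* runs first */ }
        public void test20Second() { /* runs second */ }
    }
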
+ */ + +package org.elasticsearch.packaging.test; + +import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; +import org.elasticsearch.packaging.util.Distribution; +import org.elasticsearch.packaging.util.Installation; +import org.elasticsearch.packaging.util.Shell; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.nio.file.Files; +import java.nio.file.Paths; + +import static org.elasticsearch.packaging.util.Cleanup.cleanEverything; +import static org.elasticsearch.packaging.util.FileUtils.assertPathsDontExist; +import static org.elasticsearch.packaging.util.FileUtils.assertPathsExist; +import static org.elasticsearch.packaging.util.Packages.SYSVINIT_SCRIPT; +import static org.elasticsearch.packaging.util.Packages.assertInstalled; +import static org.elasticsearch.packaging.util.Packages.assertRemoved; +import static org.elasticsearch.packaging.util.Packages.install; +import static org.elasticsearch.packaging.util.Packages.remove; +import static org.elasticsearch.packaging.util.Packages.packageStatus; +import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation; +import static org.elasticsearch.packaging.util.Platforms.isDPKG; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeThat; +import static org.junit.Assume.assumeTrue; + +@TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class) +public abstract class DebPreservationTestCase extends PackagingTestCase { + + private static Installation installation; + + protected abstract Distribution distribution(); + + @BeforeClass + public static void cleanup() { + installation = null; + cleanEverything(); + } + + @Before + public void onlyCompatibleDistributions() { + assumeTrue("only dpkg platforms", isDPKG()); + assumeTrue("only compatible distributions", distribution().packaging.compatible); + } + + public void test10Install() { + assertRemoved(distribution()); + installation = install(distribution()); + assertInstalled(distribution()); + verifyPackageInstallation(installation, distribution()); + } + + public void test20Remove() { + assumeThat(installation, is(notNullValue())); + + remove(distribution()); + + // some config files were not removed + + assertPathsExist( + installation.config, + installation.config("elasticsearch.yml"), + installation.config("jvm.options"), + installation.config("log4j2.properties") + ); + + // keystore was removed + + assertPathsDontExist( + installation.config("elasticsearch.keystore"), + installation.config(".elasticsearch.keystore.initial_md5sum") + ); + + // doc files were removed + + assertPathsDontExist( + Paths.get("/usr/share/doc/" + distribution().flavor.name), + Paths.get("/usr/share/doc/" + distribution().flavor.name + "/copyright") + ); + + // sysvinit service file was not removed + assertTrue(Files.exists(SYSVINIT_SCRIPT)); + + // defaults file was not removed + assertTrue(Files.exists(installation.envFile)); + } + + public void test30Purge() { + assumeThat(installation, is(notNullValue())); + + final Shell sh = new Shell(); + sh.run("dpkg --purge " + distribution().flavor.name); + + assertRemoved(distribution()); + + assertPathsDontExist( + installation.config, + installation.envFile, + SYSVINIT_SCRIPT + ); + + assertThat(packageStatus(distribution()).exitCode, is(1)); + } +} diff --git 
a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebBasicTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebBasicTests.java new file mode 100644 index 0000000000000..cd40c0e9e814d --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebBasicTests.java @@ -0,0 +1,31 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class DefaultDebBasicTests extends PackageTestCase { + + @Override + protected Distribution distribution() { + return Distribution.DEFAULT_DEB; + } + +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebPreservationTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebPreservationTests.java new file mode 100644 index 0000000000000..d8b8c7f562bd9 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebPreservationTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class DefaultDebPreservationTests extends DebPreservationTestCase { + + @Override + protected Distribution distribution() { + return Distribution.DEFAULT_DEB; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmBasicTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmBasicTests.java new file mode 100644 index 0000000000000..a8ce7b48685bd --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmBasicTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class DefaultRpmBasicTests extends PackageTestCase { + + @Override + protected Distribution distribution() { + return Distribution.DEFAULT_RPM; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmPreservationTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmPreservationTests.java new file mode 100644 index 0000000000000..633492cce6c2f --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmPreservationTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class DefaultRpmPreservationTests extends RpmPreservationTestCase { + + @Override + protected Distribution distribution() { + return Distribution.DEFAULT_RPM; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebBasicTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebBasicTests.java new file mode 100644 index 0000000000000..5d779ac46536e --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebBasicTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class OssDebBasicTests extends PackageTestCase { + + @Override + protected Distribution distribution() { + return Distribution.OSS_DEB; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebPreservationTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebPreservationTests.java new file mode 100644 index 0000000000000..cfce73bb16000 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebPreservationTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class OssDebPreservationTests extends DebPreservationTestCase { + + @Override + protected Distribution distribution() { + return Distribution.OSS_DEB; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmBasicTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmBasicTests.java new file mode 100644 index 0000000000000..d3320c0795589 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmBasicTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class OssRpmBasicTests extends PackageTestCase { + + @Override + protected Distribution distribution() { + return Distribution.OSS_RPM; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmPreservationTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmPreservationTests.java new file mode 100644 index 0000000000000..87071d687d03e --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmPreservationTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class OssRpmPreservationTests extends RpmPreservationTestCase { + + @Override + protected Distribution distribution() { + return Distribution.OSS_RPM; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageDependenciesTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageDependenciesTests.java new file mode 100644 index 0000000000000..6861ef0a3ff1e --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageDependenciesTests.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Platforms; +import org.elasticsearch.packaging.util.Shell; +import org.elasticsearch.packaging.util.Shell.Result; + +import java.util.regex.Pattern; + +import static junit.framework.TestCase.assertTrue; +import static org.elasticsearch.packaging.util.Distribution.DEFAULT_DEB; +import static org.elasticsearch.packaging.util.Distribution.DEFAULT_RPM; +import static org.elasticsearch.packaging.util.Distribution.OSS_DEB; +import static org.elasticsearch.packaging.util.Distribution.OSS_RPM; +import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; +import static org.junit.Assume.assumeTrue; + +/** + * Tests that linux packages correctly declare their dependencies and their conflicts + */ +public class PackageDependenciesTests extends PackagingTestCase { + + public void testDebDependencies() { + assumeTrue(Platforms.isDPKG()); + + final Shell sh = new Shell(); + + final Result defaultResult = sh.run("dpkg -I " + getDistributionFile(DEFAULT_DEB)); + final Result ossResult = sh.run("dpkg -I " + getDistributionFile(OSS_DEB)); + + assertTrue(Pattern.compile("(?m)^ Depends:.*bash.*").matcher(defaultResult.stdout).find()); + assertTrue(Pattern.compile("(?m)^ Depends:.*bash.*").matcher(ossResult.stdout).find()); + + assertTrue(Pattern.compile("(?m)^ Conflicts: elasticsearch-oss$").matcher(defaultResult.stdout).find()); + assertTrue(Pattern.compile("(?m)^ Conflicts: elasticsearch$").matcher(ossResult.stdout).find()); + } + + public void testRpmDependencies() { + assumeTrue(Platforms.isRPM()); + + final Shell sh = new Shell(); + + final Result defaultDeps = sh.run("rpm -qpR " + getDistributionFile(DEFAULT_RPM)); + final Result ossDeps = sh.run("rpm -qpR " + getDistributionFile(OSS_RPM)); + + assertTrue(Pattern.compile("(?m)^/bin/bash\\s*$").matcher(defaultDeps.stdout).find()); + assertTrue(Pattern.compile("(?m)^/bin/bash\\s*$").matcher(ossDeps.stdout).find()); + + final Result defaultConflicts = sh.run("rpm -qp --conflicts " + getDistributionFile(DEFAULT_RPM)); + final Result ossConflicts = sh.run("rpm -qp --conflicts " + getDistributionFile(OSS_RPM)); + + assertTrue(Pattern.compile("(?m)^elasticsearch-oss\\s*$").matcher(defaultConflicts.stdout).find()); + assertTrue(Pattern.compile("(?m)^elasticsearch\\s*$").matcher(ossConflicts.stdout).find()); + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java new file mode 100644 index 0000000000000..28a767e95aef2 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java @@ -0,0 +1,168 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
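
PackageDependenciesTests above matches dpkg and rpm metadata with patterns like "(?m)^ Depends:.*bash.*": the inline (?m) flag turns on Pattern.MULTILINE, so ^ and $ anchor at each line boundary of the multi-line command output rather than only at the ends of the whole string. A self-contained sketch (the sample text is an assumption shaped like `dpkg -I` output, not captured from a real package):

    import java.util.regex.Pattern;

    public class MultilineRegexExample {
        public static void main(String[] args) {
            String stdout = " Package: elasticsearch\n"
                + " Depends: bash (>= 4.1), libc6, adduser\n"
                + " Conflicts: elasticsearch-oss\n";
            // without (?m) neither pattern would match, because ^ would only
            // anchor at the very start of stdout
            System.out.println(Pattern.compile("(?m)^ Depends:.*bash.*").matcher(stdout).find());              // true
            System.out.println(Pattern.compile("(?m)^ Conflicts: elasticsearch-oss$").matcher(stdout).find()); // true
        }
    }
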
+ */ + +package org.elasticsearch.packaging.test; + +import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; +import org.elasticsearch.packaging.util.Distribution; +import org.elasticsearch.packaging.util.Installation; +import org.elasticsearch.packaging.util.Shell; + +import org.elasticsearch.packaging.util.Shell.Result; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.nio.file.Files; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static org.elasticsearch.packaging.util.Cleanup.cleanEverything; +import static org.elasticsearch.packaging.util.FileUtils.assertPathsDontExist; +import static org.elasticsearch.packaging.util.Packages.SYSTEMD_SERVICE; +import static org.elasticsearch.packaging.util.Packages.assertInstalled; +import static org.elasticsearch.packaging.util.Packages.assertRemoved; +import static org.elasticsearch.packaging.util.Packages.install; +import static org.elasticsearch.packaging.util.Packages.remove; +import static org.elasticsearch.packaging.util.Packages.startElasticsearch; +import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation; +import static org.elasticsearch.packaging.util.Platforms.getOsRelease; +import static org.elasticsearch.packaging.util.Platforms.isSystemd; +import static org.elasticsearch.packaging.util.ServerUtils.runElasticsearchTests; + +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.isEmptyString; + +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertFalse; +import static org.junit.Assume.assumeThat; +import static org.junit.Assume.assumeTrue; + +@TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class) +public abstract class PackageTestCase extends PackagingTestCase { + + private static Installation installation; + + protected abstract Distribution distribution(); + + @BeforeClass + public static void cleanup() { + installation = null; + cleanEverything(); + } + + @Before + public void onlyCompatibleDistributions() { + assumeTrue("only compatible distributions", distribution().packaging.compatible); + } + + public void test10InstallPackage() { + assertRemoved(distribution()); + installation = install(distribution()); + assertInstalled(distribution()); + verifyPackageInstallation(installation, distribution()); + } + + public void test20PluginsCommandWhenNoPlugins() { + assumeThat(installation, is(notNullValue())); + + final Shell sh = new Shell(); + assertThat(sh.run(installation.bin("elasticsearch-plugin") + " list").stdout, isEmptyString()); + } + + public void test30InstallDoesNotStartServer() { + assumeThat(installation, is(notNullValue())); + + final Shell sh = new Shell(); + assertThat(sh.run("ps aux").stdout, not(containsString("org.elasticsearch.bootstrap.Elasticsearch"))); + } + + public void test40StartServer() throws IOException { + assumeThat(installation, is(notNullValue())); + + startElasticsearch(); + runElasticsearchTests(); + verifyPackageInstallation(installation, distribution()); // check startup script didn't change permissions + } + + public void test50Remove() { + assumeThat(installation, is(notNullValue())); + + remove(distribution()); + + // removing must stop the service + final Shell sh = new Shell(); + assertThat(sh.run("ps aux").stdout, 
not(containsString("org.elasticsearch.bootstrap.Elasticsearch"))); + + if (isSystemd()) { + + final int statusExitCode; + + // Before version 231 systemctl returned exit code 3 for both stopped and nonexistent + // services [1]. In version 231 and later it returns exit code 4 for nonexistent services. + // + // The exception is CentOS 7 and OEL 7, where systemd returns exit code 4 for nonexistent services even though it reports a version + // earlier than 231. CentOS 6 does not have an /etc/os-release, but that's fine because it also doesn't use systemd. + // + // [1] https://github.com/systemd/systemd/pull/3385 + if (getOsRelease().contains("ID=\"centos\"") || getOsRelease().contains("ID=\"ol\"")) { + statusExitCode = 4; + } else { + + final Result versionResult = sh.run("systemctl --version"); + final Matcher matcher = Pattern.compile("^systemd (\\d+)\n").matcher(versionResult.stdout); + matcher.find(); + final int version = Integer.parseInt(matcher.group(1)); + + statusExitCode = version < 231 + ? 3 + : 4; + } + + assertThat(sh.runIgnoreExitCode("systemctl status elasticsearch.service").exitCode, is(statusExitCode)); + assertThat(sh.runIgnoreExitCode("systemctl is-enabled elasticsearch.service").exitCode, is(1)); + + } + + assertPathsDontExist( + installation.bin, + installation.lib, + installation.modules, + installation.plugins, + installation.logs, + installation.pidDir + ); + + assertFalse(Files.exists(SYSTEMD_SERVICE)); + } + + public void test60Reinstall() { + assumeThat(installation, is(notNullValue())); + + installation = install(distribution()); + assertInstalled(distribution()); + verifyPackageInstallation(installation, distribution()); + + remove(distribution()); + assertRemoved(distribution()); + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java new file mode 100644 index 0000000000000..77644b70f28e1 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import com.carrotsearch.randomizedtesting.JUnit3MethodProvider; +import com.carrotsearch.randomizedtesting.RandomizedRunner; +import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.Before; +import org.junit.Rule; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; + +@RunWith(RandomizedRunner.class) +@TestMethodProviders({ + JUnit3MethodProvider.class +}) +/** + * Class that all packaging test cases should inherit from.
This makes working with the packaging tests more similar to what we're + * familiar with from {@link org.elasticsearch.test.ESTestCase} without having to apply its behavior that's not relevant here + */ +public abstract class PackagingTestCase { + + protected final Log logger = LogFactory.getLog(getClass()); + + @Rule + public final TestName testNameRule = new TestName(); + + @Before + public void logTestNameBefore() { + logger.info("[" + testNameRule.getMethodName() + "]: before test"); + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java new file mode 100644 index 0000000000000..527c1d2cc13a2 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java @@ -0,0 +1,141 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; +import org.elasticsearch.packaging.util.Distribution; +import org.elasticsearch.packaging.util.Installation; +import org.elasticsearch.packaging.util.Shell; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.stream.Stream; + +import static org.elasticsearch.packaging.util.Cleanup.cleanEverything; +import static org.elasticsearch.packaging.util.FileUtils.append; +import static org.elasticsearch.packaging.util.FileUtils.assertPathsDontExist; +import static org.elasticsearch.packaging.util.Packages.SYSTEMD_SERVICE; +import static org.elasticsearch.packaging.util.Packages.SYSVINIT_SCRIPT; +import static org.elasticsearch.packaging.util.Packages.assertInstalled; +import static org.elasticsearch.packaging.util.Packages.assertRemoved; +import static org.elasticsearch.packaging.util.Packages.install; +import static org.elasticsearch.packaging.util.Packages.remove; +import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation; +import static org.elasticsearch.packaging.util.Platforms.isRPM; +import static org.elasticsearch.packaging.util.Platforms.isSystemd; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeThat; +import static org.junit.Assume.assumeTrue; + +@TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class) +public abstract class RpmPreservationTestCase extends PackagingTestCase { + + private static Installation installation; + + protected abstract Distribution distribution(); + + @BeforeClass + public 
static void cleanup() { + installation = null; + cleanEverything(); + } + + @Before + public void onlyCompatibleDistributions() { + assumeTrue("only rpm platforms", isRPM()); + assumeTrue("only compatible distributions", distribution().packaging.compatible); + } + + public void test10Install() { + assertRemoved(distribution()); + installation = install(distribution()); + assertInstalled(distribution()); + verifyPackageInstallation(installation, distribution()); + } + + public void test20Remove() { + assumeThat(installation, is(notNullValue())); + + remove(distribution()); + + // config was removed + assertFalse(Files.exists(installation.config)); + + // sysvinit service file was removed + assertFalse(Files.exists(SYSVINIT_SCRIPT)); + + // defaults file was removed + assertFalse(Files.exists(installation.envFile)); + } + + public void test30PreserveConfig() { + final Shell sh = new Shell(); + + installation = install(distribution()); + assertInstalled(distribution()); + verifyPackageInstallation(installation, distribution()); + + sh.run("echo foobar | " + installation.executables().elasticsearchKeystore + " add --stdin foo.bar"); + Stream.of( + installation.config("elasticsearch.yml"), + installation.config("jvm.options"), + installation.config("log4j2.properties") + ).forEach(path -> append(path, "# foo")); + + remove(distribution()); + assertRemoved(distribution()); + + if (isSystemd()) { + assertThat(sh.runIgnoreExitCode("systemctl is-enabled elasticsearch.service").exitCode, is(1)); + } + + assertPathsDontExist( + installation.bin, + installation.lib, + installation.modules, + installation.plugins, + installation.logs, + installation.pidDir, + installation.envFile, + SYSVINIT_SCRIPT, + SYSTEMD_SERVICE + ); + + assertTrue(Files.exists(installation.config)); + assertTrue(Files.exists(installation.config("elasticsearch.keystore"))); + + Stream.of( + "elasticsearch.yml", + "jvm.options", + "log4j2.properties" + ).forEach(configFile -> { + final Path original = installation.config(configFile); + final Path saved = installation.config(configFile + ".rpmsave"); + assertFalse(original + " should not exist", Files.exists(original)); + assertTrue(saved + " should exist", Files.exists(saved)); + }); + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java index 6ffec813eb041..9e9a453ca8422 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java @@ -35,7 +35,7 @@ import static org.elasticsearch.packaging.util.FileMatcher.p755; import static org.elasticsearch.packaging.util.FileUtils.getCurrentVersion; import static org.elasticsearch.packaging.util.FileUtils.getDefaultArchiveInstallPath; -import static org.elasticsearch.packaging.util.FileUtils.getPackagingArchivesDir; +import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; import static org.elasticsearch.packaging.util.FileUtils.lsGlob; import static org.elasticsearch.packaging.util.FileUtils.mv; @@ -66,7 +66,7 @@ public static Installation installArchive(Distribution distribution) { public static Installation installArchive(Distribution distribution, Path fullInstallPath, String version) { final Shell sh = new Shell(); - final Path distributionFile = getPackagingArchivesDir().resolve(distribution.filename(version)); + final Path distributionFile = getDistributionFile(distribution); final Path baseInstallPath = 
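
RpmPreservationTestCase's test30PreserveConfig above exercises rpm's config-file handling: on erase, config files that were modified locally are renamed with an .rpmsave suffix instead of being deleted, so local edits survive package removal. A compact sketch of the contract being asserted (the concrete paths are an assumption based on the package layout in Installation.ofPackage further down):

    import java.nio.file.Files;
    import java.nio.file.Paths;

    // after `rpm -e elasticsearch` with a locally modified elasticsearch.yml:
    Files.exists(Paths.get("/etc/elasticsearch/elasticsearch.yml"));         // false: renamed away
    Files.exists(Paths.get("/etc/elasticsearch/elasticsearch.yml.rpmsave")); // true: edits preserved
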
fullInstallPath.getParent(); final Path extractedPath = baseInstallPath.resolve("elasticsearch-" + version); @@ -106,7 +106,7 @@ public static Installation installArchive(Distribution distribution, Path fullIn Platforms.onLinux(() -> setupArchiveUsersLinux(fullInstallPath)); Platforms.onWindows(() -> setupArchiveUsersWindows(fullInstallPath)); - return new Installation(fullInstallPath); + return Installation.ofArchive(fullInstallPath); } private static void setupArchiveUsersLinux(Path installPath) { @@ -176,7 +176,6 @@ private static void verifyOssInstallation(Installation es, Distribution distribu ).forEach(dir -> assertThat(dir, file(Directory, owner, owner, p755))); assertThat(Files.exists(es.data), is(false)); - assertThat(Files.exists(es.scripts), is(false)); assertThat(es.bin, file(Directory, owner, owner, p755)); assertThat(es.lib, file(Directory, owner, owner, p755)); @@ -209,7 +208,7 @@ private static void verifyOssInstallation(Installation es, Distribution distribu "elasticsearch.yml", "jvm.options", "log4j2.properties" - ).forEach(config -> assertThat(es.config(config), file(File, owner, owner, p660))); + ).forEach(configFile -> assertThat(es.config(configFile), file(File, owner, owner, p660))); Stream.of( "NOTICE.txt", @@ -252,7 +251,7 @@ private static void verifyDefaultInstallation(Installation es, Distribution dist "roles.yml", "role_mapping.yml", "log4j2.properties" - ).forEach(config -> assertThat(es.config(config), file(File, owner, owner, p660))); + ).forEach(configFile -> assertThat(es.config(configFile), file(File, owner, owner, p660))); } public static void runElasticsearch(Installation installation) throws IOException { diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java index 4ff2998988c5f..fda61e9fb36e5 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java @@ -27,11 +27,9 @@ import static org.elasticsearch.packaging.util.FileUtils.getTempDir; import static org.elasticsearch.packaging.util.FileUtils.lsGlob; -import static org.elasticsearch.packaging.util.Platforms.isAptGet; import static org.elasticsearch.packaging.util.Platforms.isDPKG; import static org.elasticsearch.packaging.util.Platforms.isRPM; import static org.elasticsearch.packaging.util.Platforms.isSystemd; -import static org.elasticsearch.packaging.util.Platforms.isYUM; public class Cleanup { @@ -100,19 +98,14 @@ private static void purgePackagesLinux() { final Shell sh = new Shell(); if (isRPM()) { - sh.runIgnoreExitCode("rpm --quiet -e elasticsearch elasticsearch-oss"); - } - - if (isYUM()) { - sh.runIgnoreExitCode("yum remove -y elasticsearch elasticsearch-oss"); + // Doing rpm erase on both packages in one command will remove neither since both cannot be installed + // this may leave behind config files in /etc/elasticsearch, but a later step in this cleanup will get them + sh.runIgnoreExitCode("rpm --quiet -e elasticsearch"); + sh.runIgnoreExitCode("rpm --quiet -e elasticsearch-oss"); } if (isDPKG()) { sh.runIgnoreExitCode("dpkg --purge elasticsearch elasticsearch-oss"); } - - if (isAptGet()) { - sh.runIgnoreExitCode("apt-get --quiet --yes purge elasticsearch elasticsearch-oss"); - } } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileMatcher.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileMatcher.java index 9fdf6d60081a3..34bae68f97f3e 100644 
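
The Cleanup change above splits one combined erase into two because rpm treats the package list of a single `rpm -e` invocation as a unit: if any named package is not installed, the command fails and nothing is erased. Since elasticsearch and elasticsearch-oss conflict and are never installed together, the combined form could never remove anything. A sketch of the distinction, reusing this PR's Shell helper (the quoted error text is an assumption about rpm's output):

    import org.elasticsearch.packaging.util.Shell;
    import org.elasticsearch.packaging.util.Shell.Result;

    final Shell sh = new Shell();
    // one of the two names is always absent, so this erases neither package,
    // failing with something like: error: package elasticsearch-oss is not installed
    Result combined = sh.runIgnoreExitCode("rpm --quiet -e elasticsearch elasticsearch-oss");
    // erasing each flavor in its own invocation removes whichever one is present
    sh.runIgnoreExitCode("rpm --quiet -e elasticsearch");
    sh.runIgnoreExitCode("rpm --quiet -e elasticsearch-oss");
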
--- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileMatcher.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileMatcher.java @@ -47,6 +47,7 @@ public class FileMatcher extends TypeSafeMatcher { public enum Fileness { File, Directory } public static final Set p755 = fromString("rwxr-xr-x"); + public static final Set p750 = fromString("rwxr-x---"); public static final Set p660 = fromString("rw-rw----"); public static final Set p644 = fromString("rw-r--r--"); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java index 315dc6ffee1f9..10d1b3ee6b6de 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java @@ -33,11 +33,14 @@ import java.nio.file.attribute.FileOwnerAttributeView; import java.nio.file.attribute.PosixFileAttributes; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsNot.not; import static org.hamcrest.text.IsEmptyString.isEmptyOrNullString; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; /** * Wrappers and convenience methods for common filesystem operations @@ -160,4 +163,20 @@ public static Path getPackagingArchivesDir() { assertThat(fromEnv, not(isEmptyOrNullString())); return Paths.get(fromEnv); } + + public static Path getDistributionFile(Distribution distribution) { + return getDistributionFile(distribution, getCurrentVersion()); + } + + public static Path getDistributionFile(Distribution distribution, String version) { + return getPackagingArchivesDir().resolve(distribution.filename(version)); + } + + public static void assertPathsExist(Path... paths) { + Arrays.stream(paths).forEach(path -> assertTrue(path + " should exist", Files.exists(path))); + } + + public static void assertPathsDontExist(Path... 
paths) { + Arrays.stream(paths).forEach(path -> assertFalse(path + " should not exist", Files.exists(path))); + } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java index 68da440400a36..40dc546f2306d 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java @@ -20,6 +20,7 @@ package org.elasticsearch.packaging.util; import java.nio.file.Path; +import java.nio.file.Paths; /** * Represents an installation of Elasticsearch @@ -34,9 +35,10 @@ public class Installation { public final Path logs; public final Path plugins; public final Path modules; - public final Path scripts; + public final Path pidDir; + public final Path envFile; - public Installation(Path home, Path config, Path data, Path logs, Path plugins, Path modules, Path scripts) { + public Installation(Path home, Path config, Path data, Path logs, Path plugins, Path modules, Path pidDir, Path envFile) { this.home = home; this.bin = home.resolve("bin"); this.lib = home.resolve("lib"); @@ -46,18 +48,38 @@ public Installation(Path home, Path config, Path data, Path logs, Path plugins, this.logs = logs; this.plugins = plugins; this.modules = modules; - this.scripts = scripts; + this.pidDir = pidDir; + this.envFile = envFile; } - public Installation(Path home) { - this( + public static Installation ofArchive(Path home) { + return new Installation( home, home.resolve("config"), home.resolve("data"), home.resolve("logs"), home.resolve("plugins"), home.resolve("modules"), - home.resolve("scripts") + null, + null + ); + } + + public static Installation ofPackage(Distribution.Packaging packaging) { + + final Path envFile = (packaging == Distribution.Packaging.RPM) + ? Paths.get("/etc/sysconfig/elasticsearch") + : Paths.get("/etc/default/elasticsearch"); + + return new Installation( + Paths.get("/usr/share/elasticsearch"), + Paths.get("/etc/elasticsearch"), + Paths.get("/var/lib/elasticsearch"), + Paths.get("/var/log/elasticsearch"), + Paths.get("/usr/share/elasticsearch/plugins"), + Paths.get("/usr/share/elasticsearch/modules"), + Paths.get("/var/run/elasticsearch"), + envFile ); } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java new file mode 100644 index 0000000000000..6e80d9e027df2 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java @@ -0,0 +1,259 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
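
The Packages helper that follows distinguishes "removed" from "purged" through the package manager's own bookkeeping. On rpm, `rpm -qe` simply exits nonzero once the package is gone. On dpkg, a package that was removed but not purged still has recorded state, so `dpkg -s` exits 0 and reports a deinstall status line, which is what the `(?m)^Status:.+deinstall ok` pattern in assertRemoved and remove checks for. A sketch of the two states (the sample output lines are assumptions shaped like dpkg-query's format):

    import java.util.regex.Pattern;

    // after `dpkg -r elasticsearch` (removed, config files kept):
    //   $ dpkg -s elasticsearch ; echo $?
    //   Status: deinstall ok config-files
    //   0
    // after `dpkg --purge elasticsearch` (gone entirely):
    //   $ dpkg -s elasticsearch ; echo $?
    //   dpkg-query: package 'elasticsearch' is not installed ...
    //   1
    Pattern deinstalled = Pattern.compile("(?m)^Status:.+deinstall ok");
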
+ */ + +package org.elasticsearch.packaging.util; + +import org.elasticsearch.packaging.util.Shell.Result; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.regex.Pattern; +import java.util.stream.Stream; + +import static org.elasticsearch.packaging.util.FileMatcher.Fileness.Directory; +import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File; +import static org.elasticsearch.packaging.util.FileMatcher.file; +import static org.elasticsearch.packaging.util.FileMatcher.p644; +import static org.elasticsearch.packaging.util.FileMatcher.p660; +import static org.elasticsearch.packaging.util.FileMatcher.p750; +import static org.elasticsearch.packaging.util.FileMatcher.p755; +import static org.elasticsearch.packaging.util.FileUtils.getCurrentVersion; +import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; +import static org.elasticsearch.packaging.util.Platforms.isSysVInit; +import static org.elasticsearch.packaging.util.Platforms.isSystemd; +import static org.elasticsearch.packaging.util.ServerUtils.waitForElasticsearch; +import static org.hamcrest.CoreMatchers.anyOf; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class Packages { + + public static final Path SYSVINIT_SCRIPT = Paths.get("/etc/init.d/elasticsearch"); + public static final Path SYSTEMD_SERVICE = Paths.get("/usr/lib/systemd/system/elasticsearch.service"); + + public static void assertInstalled(Distribution distribution) { + final Result status = packageStatus(distribution); + assertThat(status.exitCode, is(0)); + + Platforms.onDPKG(() -> assertFalse(Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find())); + } + + public static void assertRemoved(Distribution distribution) { + final Result status = packageStatus(distribution); + + Platforms.onRPM(() -> assertThat(status.exitCode, is(1))); + + Platforms.onDPKG(() -> { + assertThat(status.exitCode, anyOf(is(0), is(1))); + if (status.exitCode == 0) { + assertTrue(Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find()); + } + }); + } + + public static Result packageStatus(Distribution distribution) { + final Shell sh = new Shell(); + final Result result; + + if (distribution.packaging == Distribution.Packaging.RPM) { + result = sh.runIgnoreExitCode("rpm -qe " + distribution.flavor.name); + } else { + result = sh.runIgnoreExitCode("dpkg -s " + distribution.flavor.name); + } + + return result; + } + + public static Installation install(Distribution distribution) { + return install(distribution, getCurrentVersion()); + } + + public static Installation install(Distribution distribution, String version) { + final Shell sh = new Shell(); + final Path distributionFile = getDistributionFile(distribution, version); + + Platforms.onRPM(() -> sh.run("rpm -i " + distributionFile)); + Platforms.onDPKG(() -> sh.run("dpkg -i " + distributionFile)); + + return Installation.ofPackage(distribution.packaging); + } + + public static void remove(Distribution distribution) { + final Shell sh = new Shell(); + + Platforms.onRPM(() -> { + sh.run("rpm -e " + distribution.flavor.name); + final Result status = packageStatus(distribution); + assertThat(status.exitCode, is(1)); + }); + + Platforms.onDPKG(() -> { + sh.run("dpkg -r " + 
distribution.flavor.name); + final Result status = packageStatus(distribution); + assertThat(status.exitCode, is(0)); + assertTrue(Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find()); + }); + } + + public static void verifyPackageInstallation(Installation installation, Distribution distribution) { + verifyOssInstallation(installation, distribution); + if (distribution.flavor == Distribution.Flavor.DEFAULT) { + verifyDefaultInstallation(installation); + } + } + + + private static void verifyOssInstallation(Installation es, Distribution distribution) { + final Shell sh = new Shell(); + + sh.run("id elasticsearch"); + sh.run("getent group elasticsearch"); + + final Result passwdResult = sh.run("getent passwd elasticsearch"); + final Path homeDir = Paths.get(passwdResult.stdout.trim().split(":")[5]); + assertFalse("elasticsearch user home directory must not exist", Files.exists(homeDir)); + + Stream.of( + es.home, + es.plugins, + es.modules + ).forEach(dir -> assertThat(dir, file(Directory, "root", "root", p755))); + + assertThat(es.pidDir, file(Directory, "elasticsearch", "elasticsearch", p755)); + + Stream.of( + es.data, + es.logs + ).forEach(dir -> assertThat(dir, file(Directory, "elasticsearch", "elasticsearch", p750))); + + // we shell out here because java's posix file permission view doesn't support special modes + assertThat(es.config, file(Directory, "root", "elasticsearch", p750)); + assertThat(sh.run("find \"" + es.config + "\" -maxdepth 0 -printf \"%m\"").stdout, containsString("2750")); + + Stream.of( + "elasticsearch.keystore", + "elasticsearch.yml", + "jvm.options", + "log4j2.properties" + ).forEach(configFile -> assertThat(es.config(configFile), file(File, "root", "elasticsearch", p660))); + assertThat(es.config(".elasticsearch.keystore.initial_md5sum"), file(File, "root", "elasticsearch", p644)); + + assertThat(sh.run("sudo -u elasticsearch " + es.bin("elasticsearch-keystore") + " list").stdout, containsString("keystore.seed")); + + Stream.of( + es.bin, + es.lib + ).forEach(dir -> assertThat(dir, file(Directory, "root", "root", p755))); + + Stream.of( + "elasticsearch", + "elasticsearch-plugin", + "elasticsearch-keystore", + "elasticsearch-translog" + ).forEach(executable -> assertThat(es.bin(executable), file(File, "root", "root", p755))); + + Stream.of( + "NOTICE.txt", + "README.textile" + ).forEach(doc -> assertThat(es.home.resolve(doc), file(File, "root", "root", p644))); + + assertThat(es.envFile, file(File, "root", "elasticsearch", p660)); + + if (distribution.packaging == Distribution.Packaging.RPM) { + assertThat(es.home.resolve("LICENSE.txt"), file(File, "root", "root", p644)); + } else { + Path copyrightDir = Paths.get(sh.run("readlink -f /usr/share/doc/" + distribution.flavor.name).stdout.trim()); + assertThat(copyrightDir, file(Directory, "root", "root", p755)); + assertThat(copyrightDir.resolve("copyright"), file(File, "root", "root", p644)); + } + + if (isSystemd()) { + Stream.of( + SYSTEMD_SERVICE, + Paths.get("/usr/lib/tmpfiles.d/elasticsearch.conf"), + Paths.get("/usr/lib/sysctl.d/elasticsearch.conf") + ).forEach(confFile -> assertThat(confFile, file(File, "root", "root", p644))); + + final String sysctlExecutable = (distribution.packaging == Distribution.Packaging.RPM) + ? 
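
verifyOssInstallation above shells out to `find "<config>" -maxdepth 0 -printf "%m"` to check the config directory's mode because NIO's POSIX attribute view only models the nine rwx permission bits; special bits such as setgid (the leading 2 in 2750) are not representable there. A minimal sketch of that limitation (the path is hypothetical and must exist for the snippet to run):

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.attribute.PosixFilePermission;
    import java.nio.file.attribute.PosixFilePermissions;
    import java.util.Set;

    public class SpecialModeExample {
        public static void main(String[] args) throws Exception {
            Path config = Paths.get("/etc/elasticsearch"); // assume a setgid dir with mode 2750
            Set<PosixFilePermission> perms = Files.getPosixFilePermissions(config);
            // prints "rwxr-x---": only the permission bits survive; the setgid
            // bit is invisible, hence the shell-out to find -printf "%m"
            System.out.println(PosixFilePermissions.toString(perms));
        }
    }
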
"/usr/sbin/sysctl" + : "/sbin/sysctl"; + assertThat(sh.run(sysctlExecutable + " vm.max_map_count").stdout, containsString("vm.max_map_count = 262144")); + } + + if (isSysVInit()) { + assertThat(SYSVINIT_SCRIPT, file(File, "root", "root", p750)); + } + } + + private static void verifyDefaultInstallation(Installation es) { + + Stream.of( + "elasticsearch-certgen", + "elasticsearch-certutil", + "elasticsearch-croneval", + "elasticsearch-migrate", + "elasticsearch-saml-metadata", + "elasticsearch-setup-passwords", + "elasticsearch-sql-cli", + "elasticsearch-syskeygen", + "elasticsearch-users", + "x-pack-env", + "x-pack-security-env", + "x-pack-watcher-env" + ).forEach(executable -> assertThat(es.bin(executable), file(File, "root", "root", p755))); + + // at this time we only install the current version of archive distributions, but if that changes we'll need to pass + // the version through here + assertThat(es.bin("elasticsearch-sql-cli-" + getCurrentVersion() + ".jar"), file(File, "root", "root", p755)); + + Stream.of( + "users", + "users_roles", + "roles.yml", + "role_mapping.yml", + "log4j2.properties" + ).forEach(configFile -> assertThat(es.config(configFile), file(File, "root", "elasticsearch", p660))); + } + + public static void startElasticsearch() throws IOException { + final Shell sh = new Shell(); + if (isSystemd()) { + sh.run("systemctl daemon-reload"); + sh.run("systemctl enable elasticsearch.service"); + sh.run("systemctl is-enabled elasticsearch.service"); + sh.run("systemctl start elasticsearch.service"); + } else { + sh.run("service elasticsearch start"); + } + + waitForElasticsearch(); + + if (isSystemd()) { + sh.run("systemctl is-active elasticsearch.service"); + sh.run("systemctl status elasticsearch.service"); + } else { + sh.run("service elasticsearch status"); + } + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java index 5ffbc31820022..c7ca1284ca69a 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java @@ -19,23 +19,28 @@ package org.elasticsearch.packaging.util; +import java.nio.file.Paths; + +import static org.elasticsearch.packaging.util.FileUtils.slurp; + public class Platforms { public static final String OS_NAME = System.getProperty("os.name"); public static final boolean LINUX = OS_NAME.startsWith("Linux"); public static final boolean WINDOWS = OS_NAME.startsWith("Windows"); - public static boolean isDPKG() { - if (WINDOWS) { - return false; + public static String getOsRelease() { + if (LINUX) { + return slurp(Paths.get("/etc/os-release")); + } else { + throw new RuntimeException("os-release is only supported on linux"); } - return new Shell().runIgnoreExitCode("which dpkg").isSuccess(); } - public static boolean isAptGet() { + public static boolean isDPKG() { if (WINDOWS) { return false; } - return new Shell().runIgnoreExitCode("which apt-get").isSuccess(); + return new Shell().runIgnoreExitCode("which dpkg").isSuccess(); } public static boolean isRPM() { @@ -45,13 +50,6 @@ public static boolean isRPM() { return new Shell().runIgnoreExitCode("which rpm").isSuccess(); } - public static boolean isYUM() { - if (WINDOWS) { - return false; - } - return new Shell().runIgnoreExitCode("which yum").isSuccess(); - } - public static boolean isSystemd() { if (WINDOWS) { return false; @@ -78,6 +76,18 @@ public static void onLinux(PlatformAction action) 
{ } } + public static void onRPM(PlatformAction action) { + if (isRPM()) { + action.run(); + } + } + + public static void onDPKG(PlatformAction action) { + if (isDPKG()) { + action.run(); + } + } + /** * Essentially a Runnable, but we make the distinction so it's more clear that these are synchronous */ diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java index ff006a34e6892..6331b4bf46e9a 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java @@ -72,7 +72,7 @@ public static void waitForElasticsearch(String status, String index) throws IOEx } catch (HttpHostConnectException e) { // we want to retry if the connection is refused - LOG.info("Got connection refused when waiting for cluster health", e); + LOG.debug("Got connection refused when waiting for cluster health", e); } timeElapsed = System.currentTimeMillis() - startTime; diff --git a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats deleted file mode 100644 index 749c72c8b312f..0000000000000 --- a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats +++ /dev/null @@ -1,233 +0,0 @@ -#!/usr/bin/env bats - -# This file is used to test the installation and removal -# of a Debian package. - -# WARNING: This testing file must be executed as root and can -# dramatically change your system. It should only be executed -# in a throw-away VM like those made by the Vagrantfile at -# the root of the Elasticsearch source code. This should -# cause the script to fail if it is executed any other way: -[ -f /etc/is_vagrant_vm ] || { - >&2 echo "must be run on a vagrant VM" - exit 1 -} - -# The test case can be executed with the Bash Automated -# Testing System tool available at https://github.com/sstephenson/bats -# Thanks to Sam Stephenson! - -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
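The `onRPM`/`onDPKG` helpers added above follow the same pattern as `onLinux`: run an action only when the platform matches. A minimal usage sketch, assuming only the `Shell` and `PlatformAction` signatures visible in this diff (the package being queried is illustrative):

```java
import org.elasticsearch.packaging.util.Platforms;
import org.elasticsearch.packaging.util.Shell;

public class PackageManagerChecks {
    public static void main(String[] args) {
        Shell sh = new Shell();
        // each action runs only when the matching package manager is present,
        // replacing the bats-era skip_not_dpkg / skip_not_rpm guards used by
        // the deleted test files below
        Platforms.onDPKG(() -> sh.run("dpkg -s elasticsearch-oss"));
        Platforms.onRPM(() -> sh.run("rpm -qe elasticsearch-oss"));
    }
}
```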
- -# Load test utilities -load $BATS_UTILS/utils.bash -load $BATS_UTILS/packages.bash -load $BATS_UTILS/plugins.bash - -# Cleans everything for the 1st execution -setup() { - skip_not_dpkg - export_elasticsearch_paths -} - -@test "[DEB] package depends on bash" { - dpkg -I elasticsearch-oss-$(cat version).deb | grep "Depends:.*bash.*" -} - -@test "[DEB] package conflicts" { - dpkg -I elasticsearch-oss-$(cat version).deb | grep "^ Conflicts: elasticsearch$" - dpkg -I elasticsearch-$(cat version).deb | grep "^ Conflicts: elasticsearch-oss$" -} - -################################## -# Install DEB package -################################## -@test "[DEB] dpkg command is available" { - clean_before_test - dpkg --version -} - -@test "[DEB] package is available" { - count=$(ls elasticsearch-oss-$(cat version).deb | wc -l) - [ "$count" -eq 1 ] -} - -@test "[DEB] package is not installed" { - run dpkg -s 'elasticsearch-oss' - [ "$status" -eq 1 ] -} - -@test "[DEB] temporarily remove java and ensure the install fails" { - move_java - run dpkg -i elasticsearch-oss-$(cat version).deb - output=$status - unmove_java - [ "$output" -eq 1 ] -} - -@test "[DEB] install package" { - dpkg -i elasticsearch-oss-$(cat version).deb -} - -@test "[DEB] package is installed" { - dpkg -s 'elasticsearch-oss' -} - -@test "[DEB] verify package installation" { - verify_package_installation -} - -@test "[DEB] verify elasticsearch-plugin list runs without any plugins installed" { - local plugins_list=`$ESHOME/bin/elasticsearch-plugin list` - [[ -z $plugins_list ]] -} - -@test "[DEB] elasticsearch isn't started by package install" { - # Wait a second to give Elasticsearch a change to start if it is going to. - # This isn't perfect by any means but its something. - sleep 1 - ! ps aux | grep elasticsearch | grep java - # You might be tempted to use jps instead of the above but that'd have to - # look like: - # ! sudo -u elasticsearch jps | grep -i elasticsearch - # which isn't really easier to read than the above. 
-} - -@test "[DEB] test elasticsearch" { - start_elasticsearch_service - run_elasticsearch_tests -} - -@test "[DEB] verify package installation after start" { - # Checks that the startup scripts didn't change the permissions - verify_package_installation -} - -################################## -# Uninstall DEB package -################################## -@test "[DEB] remove package" { - dpkg -r 'elasticsearch-oss' -} - -@test "[DEB] package has been removed" { - run dpkg -s 'elasticsearch-oss' - [ "$status" -eq 0 ] - echo "$output" | grep -i "status" | grep -i "deinstall ok" -} - -@test "[DEB] verify package removal" { - # The removal must stop the service - count=$(ps | grep Elasticsearch | wc -l) - [ "$count" -eq 0 ] - - # The removal must disable the service - # see prerm file - if is_systemd; then - missing_exit_code=4 - if [ $(systemctl --version | head -1 | awk '{print $2}') -lt 231 ]; then - # systemd before version 231 used exit code 3 when the service did not exist - missing_exit_code=3 - fi - run systemctl status elasticsearch.service - [ "$status" -eq $missing_exit_code ] - - run systemctl is-enabled elasticsearch.service - [ "$status" -eq 1 ] - fi - - # Those directories are deleted when removing the package - # see postrm file - assert_file_not_exist "/var/log/elasticsearch" - assert_file_not_exist "/usr/share/elasticsearch/plugins" - assert_file_not_exist "/usr/share/elasticsearch/modules" - assert_file_not_exist "/var/run/elasticsearch" - - # Those directories are removed by the package manager - assert_file_not_exist "/usr/share/elasticsearch/bin" - assert_file_not_exist "/usr/share/elasticsearch/lib" - assert_file_not_exist "/usr/share/elasticsearch/modules" - assert_file_not_exist "/usr/share/elasticsearch/modules/lang-painless" - - # The configuration files are still here - assert_file_exist "/etc/elasticsearch" - # TODO: use ucf to handle these better for Debian-based systems - assert_file_not_exist "/etc/elasticsearch/elasticsearch.keystore" - assert_file_not_exist "/etc/elasticsearch/.elasticsearch.keystore.initial_md5sum" - assert_file_exist "/etc/elasticsearch/elasticsearch.yml" - assert_file_exist "/etc/elasticsearch/jvm.options" - assert_file_exist "/etc/elasticsearch/log4j2.properties" - - # The env file is still here - assert_file_exist "/etc/default/elasticsearch" - - # The service files are still here - assert_file_exist "/etc/init.d/elasticsearch" -} - -@test "[DEB] purge package" { - # User installed scripts aren't removed so we'll just get them ourselves - rm -rf $ESSCRIPTS - dpkg --purge 'elasticsearch-oss' -} - -@test "[DEB] verify package purge" { - # all remaining files are deleted by the purge - assert_file_not_exist "/etc/elasticsearch" - assert_file_not_exist "/etc/elasticsearch/elasticsearch.keystore" - assert_file_not_exist "/etc/elasticsearch/.elasticsearch.keystore.initial_md5sum" - assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" - assert_file_not_exist "/etc/elasticsearch/jvm.options" - assert_file_not_exist "/etc/elasticsearch/log4j2.properties" - - assert_file_not_exist "/etc/default/elasticsearch" - - assert_file_not_exist "/etc/init.d/elasticsearch" - assert_file_not_exist "/usr/lib/systemd/system/elasticsearch.service" - - assert_file_not_exist "/usr/share/elasticsearch" - - assert_file_not_exist "/usr/share/doc/elasticsearch-oss" - assert_file_not_exist "/usr/share/doc/elasticsearch-oss/copyright" -} - -@test "[DEB] package has been completly removed" { - run dpkg -s 'elasticsearch-oss' - [ "$status" -eq 1 ] -} - -@test 
"[DEB] reinstall package" { - dpkg -i elasticsearch-oss-$(cat version).deb -} - -@test "[DEB] package is installed by reinstall" { - dpkg -s 'elasticsearch-oss' -} - -@test "[DEB] verify package reinstallation" { - verify_package_installation -} - -@test "[DEB] repurge package" { - dpkg --purge 'elasticsearch-oss' -} - -@test "[DEB] package has been completly removed again" { - run dpkg -s 'elasticsearch-oss' - [ "$status" -eq 1 ] -} diff --git a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats deleted file mode 100644 index cb12d4b50e02b..0000000000000 --- a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats +++ /dev/null @@ -1,220 +0,0 @@ -#!/usr/bin/env bats - -# This file is used to test the installation of a RPM package. - -# WARNING: This testing file must be executed as root and can -# dramatically change your system. It should only be executed -# in a throw-away VM like those made by the Vagrantfile at -# the root of the Elasticsearch source code. This should -# cause the script to fail if it is executed any other way: -[ -f /etc/is_vagrant_vm ] || { - >&2 echo "must be run on a vagrant VM" - exit 1 -} - -# The test case can be executed with the Bash Automated -# Testing System tool available at https://github.com/sstephenson/bats -# Thanks to Sam Stephenson! - -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -# Load test utilities -load $BATS_UTILS/utils.bash -load $BATS_UTILS/packages.bash -load $BATS_UTILS/plugins.bash - -# Cleans everything for the 1st execution -setup() { - skip_not_rpm - export_elasticsearch_paths -} - -@test "[RPM] package depends on bash" { - rpm -qpR elasticsearch-oss-$(cat version).rpm | grep '/bin/bash' -} - -@test "[RPM] package conflicts" { - rpm -qp --conflicts elasticsearch-oss-$(cat version).rpm | grep "^elasticsearch\s*$" - rpm -qp --conflicts elasticsearch-$(cat version).rpm | grep "^elasticsearch-oss\s*$" -} - -################################## -# Install RPM package -################################## -@test "[RPM] rpm command is available" { - clean_before_test - rpm --version -} - -@test "[RPM] package is available" { - count=$(ls elasticsearch-oss-$(cat version).rpm | wc -l) - [ "$count" -eq 1 ] -} - -@test "[RPM] package is not installed" { - run rpm -qe 'elasticsearch-oss' - [ "$status" -eq 1 ] -} - -@test "[RPM] temporarily remove java and ensure the install fails" { - move_java - run rpm -i elasticsearch-oss-$(cat version).rpm - output=$status - unmove_java - [ "$output" -eq 1 ] -} - -@test "[RPM] install package" { - rpm -i elasticsearch-oss-$(cat version).rpm -} - -@test "[RPM] package is installed" { - rpm -qe 'elasticsearch-oss' -} - -@test "[RPM] verify package installation" { - verify_package_installation -} - -@test "[RPM] verify elasticsearch-plugin list runs without any plugins installed" { - local plugins_list=`$ESHOME/bin/elasticsearch-plugin list` - [[ -z $plugins_list ]] -} - -@test "[RPM] elasticsearch isn't started by package install" { - # Wait a second to give Elasticsearch a change to start if it is going to. - # This isn't perfect by any means but its something. - sleep 1 - ! ps aux | grep elasticsearch | grep java -} - -@test "[RPM] test elasticsearch" { - start_elasticsearch_service - run_elasticsearch_tests -} - -@test "[RPM] verify package installation after start" { - # Checks that the startup scripts didn't change the permissions - verify_package_installation -} - -@test "[RPM] remove package" { - # User installed scripts aren't removed so we'll just get them ourselves - rm -rf $ESSCRIPTS - rpm -e 'elasticsearch-oss' -} - -@test "[RPM] package has been removed" { - run rpm -qe 'elasticsearch-oss' - [ "$status" -eq 1 ] -} - -@test "[RPM] verify package removal" { - # The removal must stop the service - count=$(ps | grep Elasticsearch | wc -l) - [ "$count" -eq 0 ] - - # The removal must disable the service - # see prerm file - if is_systemd; then - run systemctl is-enabled elasticsearch.service - [ "$status" -eq 1 ] - fi - - # Those directories are deleted when removing the package - # see postrm file - assert_file_not_exist "/var/log/elasticsearch" - assert_file_not_exist "/usr/share/elasticsearch/plugins" - assert_file_not_exist "/var/run/elasticsearch" - - # Those directories are removed by the package manager - assert_file_not_exist "/usr/share/elasticsearch/bin" - assert_file_not_exist "/usr/share/elasticsearch/lib" - assert_file_not_exist "/usr/share/elasticsearch/modules" - - assert_file_not_exist "/etc/elasticsearch" - - assert_file_not_exist "/etc/init.d/elasticsearch" - assert_file_not_exist "/usr/lib/systemd/system/elasticsearch.service" - - assert_file_not_exist "/etc/sysconfig/elasticsearch" -} - -@test "[RPM] reinstall package" { - rpm -i elasticsearch-oss-$(cat version).rpm -} - -@test "[RPM] package is installed by reinstall" { - rpm -qe 'elasticsearch-oss' -} - -@test "[RPM] verify package reinstallation" { - 
verify_package_installation -} - -@test "[RPM] reremove package" { - echo foobar | "$ESHOME/bin/elasticsearch-keystore" add --stdin foo.bar - echo "# ping" >> "/etc/elasticsearch/elasticsearch.yml" - echo "# ping" >> "/etc/elasticsearch/jvm.options" - echo "# ping" >> "/etc/elasticsearch/log4j2.properties" - rpm -e 'elasticsearch-oss' -} - -@test "[RPM] verify preservation" { - # The removal must disable the service - # see prerm file - if is_systemd; then - run systemctl is-enabled elasticsearch.service - [ "$status" -eq 1 ] - fi - - # Those directories are deleted when removing the package - # see postrm file - assert_file_not_exist "/var/log/elasticsearch" - assert_file_not_exist "/usr/share/elasticsearch/plugins" - assert_file_not_exist "/usr/share/elasticsearch/modules" - assert_file_not_exist "/var/run/elasticsearch" - - assert_file_not_exist "/usr/share/elasticsearch/bin" - assert_file_not_exist "/usr/share/elasticsearch/lib" - assert_file_not_exist "/usr/share/elasticsearch/modules" - assert_file_not_exist "/usr/share/elasticsearch/modules/lang-painless" - - assert_file_exist "/etc/elasticsearch/elasticsearch.keystore" - assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" - assert_file_exist "/etc/elasticsearch/elasticsearch.yml.rpmsave" - assert_file_not_exist "/etc/elasticsearch/jvm.options" - assert_file_exist "/etc/elasticsearch/jvm.options.rpmsave" - assert_file_not_exist "/etc/elasticsearch/log4j2.properties" - assert_file_exist "/etc/elasticsearch/log4j2.properties.rpmsave" - - assert_file_not_exist "/etc/init.d/elasticsearch" - assert_file_not_exist "/usr/lib/systemd/system/elasticsearch.service" - - assert_file_not_exist "/etc/sysconfig/elasticsearch" -} - -@test "[RPM] finalize package removal" { - # cleanup - rm -rf /etc/elasticsearch -} - -@test "[RPM] package has been removed again" { - run rpm -qe 'elasticsearch-oss' - [ "$status" -eq 1 ] -} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml index 42847b05cd149..564a482727fa7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml @@ -41,6 +41,10 @@ setup: --- "Index - all": + - skip: + version: " - 6.3.99" + reason: "uuid is only available from 6.4.0 on" + - do: indices.stats: { index: _all } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.status/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.status/10_basic.yml index c9dd85b11dea7..c35f2419bdc91 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.status/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.status/10_basic.yml @@ -38,7 +38,8 @@ setup: - gt: { snapshots.0.stats.total.file_count: 0 } - gt: { snapshots.0.stats.total.size_in_bytes: 0 } - is_true: snapshots.0.stats.start_time_in_millis - - is_true: snapshots.0.stats.time_in_millis +## fast in memory snapshots can take less than one millisecond to complete. 
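The assertion changed in the next line because the previous `is_true` check fails when the reported duration is a legitimate 0. A small standalone illustration of how a sub-millisecond operation measures as 0 ms (plain Java, not from the test suite):

```java
public class ZeroMillisDuration {
    public static void main(String[] args) {
        long start = System.currentTimeMillis();
        // a sub-millisecond operation finishes before the clock ticks
        long elapsed = System.currentTimeMillis() - start;
        System.out.println("elapsed = " + elapsed + " ms"); // usually prints 0
    }
}
```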
+ - gte: { snapshots.0.stats.time_in_millis: 0 } --- "Get missing snapshot status throws an exception": diff --git a/server/build.gradle b/server/build.gradle index da60bca5a3e81..c71cc4c7dbdcf 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -329,7 +329,7 @@ if (isEclipse == false || project.path == ":server-tests") { task integTest(type: RandomizedTestingTask, group: JavaBasePlugin.VERIFICATION_GROUP, description: 'Multi-node tests', - dependsOn: test.dependsOn.collect()) { + dependsOn: test.dependsOn) { configure(BuildPlugin.commonTestConfig(project)) classpath = project.test.classpath testClassesDirs = project.test.testClassesDirs @@ -339,12 +339,3 @@ if (isEclipse == false || project.path == ":server-tests") { integTest.mustRunAfter test } -// TODO: remove ScriptDocValuesMissingV6BehaviourTests in 7.0 -additionalTest('testScriptDocValuesMissingV6Behaviour'){ - include '**/ScriptDocValuesMissingV6BehaviourTests.class' - systemProperty 'es.scripting.exception_for_missing_value', 'false' -} -test { - // these are tested explicitly in separate test tasks - exclude '**/*ScriptDocValuesMissingV6BehaviourTests.class' -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 35f1f725b65ad..5c5da62571f66 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.MultiTermAwareComponent; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.ReferringFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -574,6 +575,7 @@ private static List parseTokenFilterFactories(AnalyzeRequest Environment environment, Tuple tokenizerFactory, List charFilterFactoryList, boolean normalizer) throws IOException { List tokenFilterFactoryList = new ArrayList<>(); + List referringFilters = new ArrayList<>(); if (request.tokenFilters() != null && request.tokenFilters().size() > 0) { List tokenFilters = request.tokenFilters(); for (AnalyzeRequest.NameOrDefinition tokenFilter : tokenFilters) { @@ -594,7 +596,9 @@ private static List parseTokenFilterFactories(AnalyzeRequest tokenFilterFactory = tokenFilterFactoryFactory.get(getNaIndexSettings(settings), environment, "_anonymous_tokenfilter", settings); tokenFilterFactory = CustomAnalyzerProvider.checkAndApplySynonymFilter(tokenFilterFactory, tokenizerFactory.v1(), tokenizerFactory.v2(), tokenFilterFactoryList, charFilterFactoryList, environment); - + if (tokenFilterFactory instanceof ReferringFilterFactory) { + referringFilters.add((ReferringFilterFactory)tokenFilterFactory); + } } else { AnalysisModule.AnalysisProvider tokenFilterFactoryFactory; @@ -629,6 +633,26 @@ private static List parseTokenFilterFactories(AnalyzeRequest tokenFilterFactoryList.add(tokenFilterFactory); } } + if (referringFilters.isEmpty() == false) { + // The request included at least one custom referring tokenfilter that has not already been built by the + // analysis registry, so we need to set its references. 
Note that this will only apply pre-built + // tokenfilters + if (indexSettings == null) { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .build(); + IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build(); + indexSettings = new IndexSettings(metaData, Settings.EMPTY); + } + Map prebuiltFilters = analysisRegistry.buildTokenFilterFactories(indexSettings); + for (ReferringFilterFactory rff : referringFilters) { + rff.setReferences(prebuiltFilters); + } + + } return tokenFilterFactoryList; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index 07ae513351e1b..f9fc5880bbb5b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -39,7 +39,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentFieldMappers; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.TypeMissingException; @@ -174,19 +174,19 @@ private static Map findFieldMappingsByType(Predica final DocumentFieldMappers allFieldMappers = documentMapper.mappers(); for (String field : request.fields()) { if (Regex.isMatchAllPattern(field)) { - for (FieldMapper fieldMapper : allFieldMappers) { - addFieldMapper(fieldPredicate, fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); + for (Mapper fieldMapper : allFieldMappers) { + addFieldMapper(fieldPredicate, fieldMapper.name(), fieldMapper, fieldMappings, request.includeDefaults()); } } else if (Regex.isSimpleMatchPattern(field)) { - for (FieldMapper fieldMapper : allFieldMappers) { - if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) { - addFieldMapper(fieldPredicate, fieldMapper.fieldType().name(), + for (Mapper fieldMapper : allFieldMappers) { + if (Regex.simpleMatch(field, fieldMapper.name())) { + addFieldMapper(fieldPredicate, fieldMapper.name(), fieldMapper, fieldMappings, request.includeDefaults()); } } } else { // not a pattern - FieldMapper fieldMapper = allFieldMappers.getMapper(field); + Mapper fieldMapper = allFieldMappers.getMapper(field); if (fieldMapper != null) { addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults()); } else if (request.probablySingleFieldRequest()) { @@ -198,7 +198,7 @@ private static Map findFieldMappingsByType(Predica } private static void addFieldMapper(Predicate fieldPredicate, - String field, FieldMapper fieldMapper, Map fieldMappings, + String field, Mapper fieldMapper, Map fieldMappings, boolean includeDefaults) { if (fieldMappings.containsKey(field)) { return; @@ -207,7 +207,7 @@ private static void addFieldMapper(Predicate fieldPredicate, try { BytesReference bytes = XContentHelper.toXContent(fieldMapper, XContentType.JSON, includeDefaults ? 
includeDefaultsParams : ToXContent.EMPTY_PARAMS, false); - fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().name(), bytes)); + fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.name(), bytes)); } catch (IOException e) { throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index dc201b38c3bee..3429b35073ca2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -184,10 +184,13 @@ public PutMappingRequest source(Object... source) { } /** - * @param type the mapping type - * @param source consisting of field/properties pairs (e.g. "field1", - * "type=string,store=true"). If the number of arguments is not - * divisible by two an {@link IllegalArgumentException} is thrown + * @param type + * the mapping type + * @param source + * consisting of field/properties pairs (e.g. "field1", + * "type=string,store=true") + * @throws IllegalArgumentException + * if the number of the source arguments is not divisible by two * @return the mappings definition */ public static XContentBuilder buildFromSimplifiedDef(String type, Object... source) { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index a6ed8de653007..939b0b7024904 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -295,7 +295,7 @@ protected void doRun() throws Exception { TransportUpdateAction.resolveAndValidateRouting(metaData, concreteIndex.getName(), (UpdateRequest) docWriteRequest); break; case DELETE: - docWriteRequest.routing(metaData.resolveIndexRouting(docWriteRequest.routing(), docWriteRequest.index())); + docWriteRequest.routing(metaData.resolveWriteIndexRouting(docWriteRequest.routing(), docWriteRequest.index())); // check if routing is required, if so, throw error if routing wasn't specified if (docWriteRequest.routing() == null && metaData.routingRequired(concreteIndex.getName(), docWriteRequest.type())) { throw new RoutingMissingException(concreteIndex.getName(), docWriteRequest.type(), docWriteRequest.id()); @@ -474,7 +474,7 @@ Index getConcreteIndex(String indexOrAlias) { Index resolveIfAbsent(DocWriteRequest request) { Index concreteIndex = indices.get(request.index()); if (concreteIndex == null) { - concreteIndex = indexNameExpressionResolver.concreteSingleIndex(state, request); + concreteIndex = indexNameExpressionResolver.concreteWriteIndex(state, request); indices.put(request.index(), concreteIndex); } return concreteIndex; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index a78421a2328cb..15a98077eac4a 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -523,13 +523,12 @@ private static Engine.Result performOpOnReplica(DocWriteResponse primaryResponse indexRequest.type(), indexRequest.id(), 
indexRequest.source(), indexRequest.getContentType()) .routing(indexRequest.routing()); result = replica.applyIndexOperationOnReplica(primaryResponse.getSeqNo(), primaryResponse.getVersion(), - indexRequest.versionType().versionTypeForReplicationAndRecovery(), indexRequest.getAutoGeneratedTimestamp(), - indexRequest.isRetry(), sourceToParse); + indexRequest.getAutoGeneratedTimestamp(), indexRequest.isRetry(), sourceToParse); break; case DELETE: DeleteRequest deleteRequest = (DeleteRequest) docWriteRequest; result = replica.applyDeleteOperationOnReplica(primaryResponse.getSeqNo(), primaryResponse.getVersion(), - deleteRequest.type(), deleteRequest.id(), deleteRequest.versionType().versionTypeForReplicationAndRecovery()); + deleteRequest.type(), deleteRequest.id()); break; default: throw new IllegalStateException("Unexpected request operation type on replica: " diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java index f1a1dc451406e..18f33ab397f73 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -83,8 +83,8 @@ protected FieldCapabilitiesIndexResponse shardOperation(final FieldCapabilitiesI for (String field : fieldNames) { MappedFieldType ft = mapperService.fullName(field); if (ft != null) { - FieldCapabilities fieldCap = new FieldCapabilities(field, ft.typeName(), ft.isSearchable(), ft.isAggregatable()); - if (indicesService.isMetaDataField(field) || fieldPredicate.test(field)) { + if (indicesService.isMetaDataField(field) || fieldPredicate.test(ft.name())) { + FieldCapabilities fieldCap = new FieldCapabilities(field, ft.typeName(), ft.isSearchable(), ft.isAggregatable()); responseMap.put(field, fieldCap); } } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 51997b32edf1d..57e8ea6613817 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -496,7 +496,7 @@ public void process(Version indexCreatedVersion, @Nullable MappingMetaData mappi /* resolve the routing if needed */ public void resolveRouting(MetaData metaData) { - routing(metaData.resolveIndexRouting(routing, index)); + routing(metaData.resolveWriteIndexRouting(routing, index)); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 299a2ce812396..cc682619cbda5 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -104,7 +104,7 @@ protected void resolveRequest(ClusterState state, UpdateRequest request) { } public static void resolveAndValidateRouting(MetaData metaData, String concreteIndex, UpdateRequest request) { - request.routing((metaData.resolveIndexRouting(request.routing(), request.index()))); + request.routing((metaData.resolveWriteIndexRouting(request.routing(), request.index()))); // Fail fast on the node that received the request, rather than failing when translating on the index or delete request. 
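Each call site changed above now resolves routing through the alias's write index before the routing-required check runs. A toy model of that ordering, with hypothetical stand-ins for `MetaData` and the routing table; only the resolve-then-validate sequencing is taken from this change:

```java
import java.util.HashMap;
import java.util.Map;

public class ResolveThenValidate {
    // hypothetical stand-in for MetaData#resolveWriteIndexRouting
    static String resolveWriteIndexRouting(Map<String, String> writeIndexAliasRouting,
                                           String requestRouting, String aliasOrIndex) {
        String aliasRouting = writeIndexAliasRouting.get(aliasOrIndex);
        return aliasRouting != null ? aliasRouting : requestRouting;
    }

    public static void main(String[] args) {
        Map<String, String> aliasRouting = new HashMap<>();
        aliasRouting.put("logs", "shard-routing-1"); // alias-level index routing

        String routing = resolveWriteIndexRouting(aliasRouting, null, "logs");
        if (routing == null) {
            // mirrors RoutingMissingException: fail fast on the coordinating node
            throw new IllegalStateException("routing is required but was not resolved");
        }
        System.out.println("resolved routing: " + routing);
    }
}
```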
if (request.routing() == null && metaData.routingRequired(concreteIndex, request.type())) { throw new RoutingMissingException(concreteIndex, request.type(), request.id()); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 8fa3c2e0fc193..1f6a9fe027d1b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -42,7 +42,6 @@ import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -103,7 +102,7 @@ public String[] concreteIndexNames(ClusterState state, IndicesOptions options, S return concreteIndexNames(context, indexExpressions); } - /** + /** * Translates the provided index expression into actual concrete indices, properly deduplicated. * * @param state the cluster state containing all the data to resolve to expressions to concrete indices @@ -117,7 +116,7 @@ public String[] concreteIndexNames(ClusterState state, IndicesOptions options, S * indices options in the context don't allow such a case. */ public Index[] concreteIndices(ClusterState state, IndicesOptions options, String... indexExpressions) { - Context context = new Context(state, options); + Context context = new Context(state, options, false, false); return concreteIndices(context, indexExpressions); } @@ -193,30 +192,40 @@ Index[] concreteIndices(Context context, String... indexExpressions) { } } - Collection resolvedIndices = aliasOrIndex.getIndices(); - if (resolvedIndices.size() > 1 && !options.allowAliasesToMultipleIndices()) { - String[] indexNames = new String[resolvedIndices.size()]; - int i = 0; - for (IndexMetaData indexMetaData : resolvedIndices) { - indexNames[i++] = indexMetaData.getIndex().getName(); + if (aliasOrIndex.isAlias() && context.isResolveToWriteIndex()) { + AliasOrIndex.Alias alias = (AliasOrIndex.Alias) aliasOrIndex; + IndexMetaData writeIndex = alias.getWriteIndex(); + if (writeIndex == null) { + throw new IllegalArgumentException("no write index is defined for alias [" + alias.getAliasName() + "]." 
+ + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index"); } - throw new IllegalArgumentException("Alias [" + expression + "] has more than one indices associated with it [" + + concreteIndices.add(writeIndex.getIndex()); + } else { + if (aliasOrIndex.getIndices().size() > 1 && !options.allowAliasesToMultipleIndices()) { + String[] indexNames = new String[aliasOrIndex.getIndices().size()]; + int i = 0; + for (IndexMetaData indexMetaData : aliasOrIndex.getIndices()) { + indexNames[i++] = indexMetaData.getIndex().getName(); + } + throw new IllegalArgumentException("Alias [" + expression + "] has more than one indices associated with it [" + Arrays.toString(indexNames) + "], can't execute a single index op"); - } + } - for (IndexMetaData index : resolvedIndices) { - if (index.getState() == IndexMetaData.State.CLOSE) { - if (failClosed) { - throw new IndexClosedException(index.getIndex()); - } else { - if (options.forbidClosedIndices() == false) { - concreteIndices.add(index.getIndex()); + for (IndexMetaData index : aliasOrIndex.getIndices()) { + if (index.getState() == IndexMetaData.State.CLOSE) { + if (failClosed) { + throw new IndexClosedException(index.getIndex()); + } else { + if (options.forbidClosedIndices() == false) { + concreteIndices.add(index.getIndex()); + } } + } else if (index.getState() == IndexMetaData.State.OPEN) { + concreteIndices.add(index.getIndex()); + } else { + throw new IllegalStateException("index state [" + index.getState() + "] not supported"); } - } else if (index.getState() == IndexMetaData.State.OPEN) { - concreteIndices.add(index.getIndex()); - } else { - throw new IllegalStateException("index state [" + index.getState() + "] not supported"); } } } @@ -255,6 +264,28 @@ public Index concreteSingleIndex(ClusterState state, IndicesRequest request) { return indices[0]; } + /** + * Utility method that allows to resolve an index expression to its corresponding single write index. + * + * @param state the cluster state containing all the data to resolve to expression to a concrete index + * @param request The request that defines how the an alias or an index need to be resolved to a concrete index + * and the expression that can be resolved to an alias or an index name. + * @throws IllegalArgumentException if the index resolution does not lead to an index, or leads to more than one index + * @return the write index obtained as a result of the index resolution + */ + public Index concreteWriteIndex(ClusterState state, IndicesRequest request) { + if (request.indices() == null || (request.indices() != null && request.indices().length != 1)) { + throw new IllegalArgumentException("indices request must specify a single index expression"); + } + Context context = new Context(state, request.indicesOptions(), false, true); + Index[] indices = concreteIndices(context, request.indices()[0]); + if (indices.length != 1) { + throw new IllegalArgumentException("The index expression [" + request.indices()[0] + + "] and options provided did not point to a single write-index"); + } + return indices[0]; + } + /** * @return whether the specified alias or index exists. If the alias or index contains datemath then that is resolved too. */ @@ -292,7 +323,7 @@ public String[] indexAliases(ClusterState state, String index, Predicate resolvedExpressions = expressions != null ? 
Arrays.asList(expressions) : Collections.emptyList(); - Context context = new Context(state, IndicesOptions.lenientExpandOpen(), true); + Context context = new Context(state, IndicesOptions.lenientExpandOpen(), true, false); for (ExpressionResolver expressionResolver : expressionResolvers) { resolvedExpressions = expressionResolver.resolve(context, resolvedExpressions); } @@ -512,24 +543,26 @@ static final class Context { private final IndicesOptions options; private final long startTime; private final boolean preserveAliases; + private final boolean resolveToWriteIndex; Context(ClusterState state, IndicesOptions options) { this(state, options, System.currentTimeMillis()); } - Context(ClusterState state, IndicesOptions options, boolean preserveAliases) { - this(state, options, System.currentTimeMillis(), preserveAliases); + Context(ClusterState state, IndicesOptions options, boolean preserveAliases, boolean resolveToWriteIndex) { + this(state, options, System.currentTimeMillis(), preserveAliases, resolveToWriteIndex); } Context(ClusterState state, IndicesOptions options, long startTime) { - this(state, options, startTime, false); + this(state, options, startTime, false, false); } - Context(ClusterState state, IndicesOptions options, long startTime, boolean preserveAliases) { + Context(ClusterState state, IndicesOptions options, long startTime, boolean preserveAliases, boolean resolveToWriteIndex) { this.state = state; this.options = options; this.startTime = startTime; this.preserveAliases = preserveAliases; + this.resolveToWriteIndex = resolveToWriteIndex; } public ClusterState getState() { @@ -552,6 +585,14 @@ public long getStartTime() { boolean isPreserveAliases() { return preserveAliases; } + + /** + * This is used to require that aliases resolve to their write-index. It is currently not used in conjunction + * with preserveAliases. + */ + boolean isResolveToWriteIndex() { + return resolveToWriteIndex; + } } private interface ExpressionResolver { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 4ed2adc9a1c9f..c024388868359 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -471,6 +471,42 @@ public String[] getConcreteAllClosedIndices() { return allClosedIndices; } + /** + * Returns indexing routing for the given aliasOrIndex. Resolves routing from the alias metadata used + * in the write index. 
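Before the implementation that follows, a condensed, self-contained restatement of its precedence rules; the sketch mirrors, rather than extends, the logic of the method below:

```java
public class WriteIndexRoutingRules {
    static String resolve(String aliasIndexRouting, String requestRouting, String name) {
        if (aliasIndexRouting == null) {
            return requestRouting; // no alias-level routing: request routing wins
        }
        if (aliasIndexRouting.indexOf(',') != -1) {
            // several routing values cannot address a single write operation
            throw new IllegalArgumentException("index/alias [" + name
                + "] resolved to several routing values");
        }
        if (requestRouting != null && !requestRouting.equals(aliasIndexRouting)) {
            // an explicit routing that contradicts the alias routing is rejected
            throw new IllegalArgumentException("alias [" + name + "] has index routing ["
                + aliasIndexRouting + "] but the request provided [" + requestRouting + "]");
        }
        return aliasIndexRouting; // alias routing overrides the request routing
    }

    public static void main(String[] args) {
        System.out.println(resolve("r1", null, "logs")); // r1
        System.out.println(resolve(null, "r2", "logs")); // r2
        System.out.println(resolve("r1", "r1", "logs")); // r1
    }
}
```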
+ */ + public String resolveWriteIndexRouting(@Nullable String routing, String aliasOrIndex) { + if (aliasOrIndex == null) { + return routing; + } + + AliasOrIndex result = getAliasAndIndexLookup().get(aliasOrIndex); + if (result == null || result.isAlias() == false) { + return routing; + } + AliasOrIndex.Alias alias = (AliasOrIndex.Alias) result; + IndexMetaData writeIndex = alias.getWriteIndex(); + if (writeIndex == null) { + throw new IllegalArgumentException("alias [" + aliasOrIndex + "] does not have a write index"); + } + AliasMetaData aliasMd = writeIndex.getAliases().get(alias.getAliasName()); + if (aliasMd.indexRouting() != null) { + if (aliasMd.indexRouting().indexOf(',') != -1) { + throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + + aliasMd.getIndexRouting() + "] that resolved to several routing values, rejecting operation"); + } + if (routing != null) { + if (!routing.equals(aliasMd.indexRouting())) { + throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has index routing associated with it [" + + aliasMd.indexRouting() + "], and was provided with routing value [" + routing + "], rejecting operation"); + } + } + // Alias routing overrides the parent routing (if any). + return aliasMd.indexRouting(); + } + return routing; + } + /** * Returns indexing routing for the given index. */ diff --git a/server/src/main/java/org/elasticsearch/index/IndexWarmer.java b/server/src/main/java/org/elasticsearch/index/IndexWarmer.java index e06dc5d2e8156..98716e9545df6 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexWarmer.java +++ b/server/src/main/java/org/elasticsearch/index/IndexWarmer.java @@ -27,8 +27,6 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.IndexShard; @@ -121,16 +119,12 @@ private static class FieldDataWarmer implements IndexWarmer.Listener { public TerminationHandle warmReader(final IndexShard indexShard, final Engine.Searcher searcher) { final MapperService mapperService = indexShard.mapperService(); final Map warmUpGlobalOrdinals = new HashMap<>(); - DocumentMapper docMapper = mapperService.documentMapper(); - if (docMapper != null) { - for (FieldMapper fieldMapper : docMapper.mappers()) { - final MappedFieldType fieldType = fieldMapper.fieldType(); - final String indexName = fieldType.name(); - if (fieldType.eagerGlobalOrdinals() == false) { - continue; - } - warmUpGlobalOrdinals.put(indexName, fieldType); + for (MappedFieldType fieldType : mapperService.fieldTypes()) { + final String indexName = fieldType.name(); + if (fieldType.eagerGlobalOrdinals() == false) { + continue; } + warmUpGlobalOrdinals.put(indexName, fieldType); } final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size()); for (final MappedFieldType fieldType : warmUpGlobalOrdinals.values()) { diff --git a/server/src/main/java/org/elasticsearch/index/VersionType.java b/server/src/main/java/org/elasticsearch/index/VersionType.java index b350252dc9c41..eb05ff809076c 100644 --- a/server/src/main/java/org/elasticsearch/index/VersionType.java +++ b/server/src/main/java/org/elasticsearch/index/VersionType.java @@ -85,13 +85,6 @@ public boolean 
validateVersionForReads(long version) { // not allowing Versions.NOT_FOUND as it is not a valid input value. return version > 0L || version == Versions.MATCH_ANY; } - - @Override - public VersionType versionTypeForReplicationAndRecovery() { - // replicas get the version from the primary after increment. The same version is stored in - // the transaction log. -> the should use the external semantics. - return EXTERNAL; - } }, EXTERNAL((byte) 1) { @Override @@ -333,14 +326,6 @@ public byte getValue() { */ public abstract boolean validateVersionForReads(long version); - /** - * Some version types require different semantics for primary and replicas. This version allows - * the type to override the default behavior. - */ - public VersionType versionTypeForReplicationAndRecovery() { - return this; - } - public static VersionType fromString(String versionType) { if ("internal".equals(versionType)) { return INTERNAL; diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java index bf6b2fd7c5b47..4df0375f31cab 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java @@ -27,8 +27,7 @@ public abstract class AbstractTokenizerFactory extends AbstractIndexComponent implements TokenizerFactory { protected final Version version; - // TODO drop `String ignored` in a followup - public AbstractTokenizerFactory(IndexSettings indexSettings, String ignored, Settings settings) { + public AbstractTokenizerFactory(IndexSettings indexSettings, Settings settings) { super(indexSettings); this.version = Analysis.parseAnalysisVersion(this.indexSettings.getSettings(), settings, logger); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java index ed8d2b452c2d5..2e4473f3b0e6d 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java @@ -31,7 +31,7 @@ public class StandardTokenizerFactory extends AbstractTokenizerFactory { private final int maxTokenLength; public StandardTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java index be96dbd65602b..4abed5a62ce71 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java @@ -21,6 +21,6 @@ import org.apache.lucene.analysis.Tokenizer; -public interface TokenizerFactory { // TODO replace with Supplier +public interface TokenizerFactory { Tokenizer create(); } diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 9c93d2bf25608..b5b4e26cdfc61 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ 
-1187,6 +1187,7 @@ public static class Index extends Operation { public Index(Term uid, ParsedDocument doc, long seqNo, long primaryTerm, long version, VersionType versionType, Origin origin, long startTime, long autoGeneratedIdTimestamp, boolean isRetry) { super(uid, seqNo, primaryTerm, version, versionType, origin, startTime); + assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin; this.doc = doc; this.isRetry = isRetry; this.autoGeneratedIdTimestamp = autoGeneratedIdTimestamp; @@ -1264,6 +1265,7 @@ public static class Delete extends Operation { public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, VersionType versionType, Origin origin, long startTime) { super(uid, seqNo, primaryTerm, version, versionType, origin, startTime); + assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin; this.type = Objects.requireNonNull(type); this.id = Objects.requireNonNull(id); } diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 81fe7f60bc5d6..1427509a2ec7c 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -740,7 +740,7 @@ private boolean canOptimizeAddDocument(Index index) { return true; case PEER_RECOVERY: case REPLICA: - assert index.version() == 1 && index.versionType() == VersionType.EXTERNAL + assert index.version() == 1 && index.versionType() == null : "version: " + index.version() + " type: " + index.versionType(); return true; case LOCAL_TRANSLOG_RECOVERY: @@ -755,21 +755,7 @@ private boolean canOptimizeAddDocument(Index index) { protected boolean assertPrimaryCanOptimizeAddDocument(final Index index) { assert (index.version() == Versions.MATCH_ANY && index.versionType() == VersionType.INTERNAL) - : "version: " + index.version() + " type: " + index.versionType(); - return true; - } - - private boolean assertVersionType(final Engine.Operation operation) { - if (operation.origin() == Operation.Origin.REPLICA || - operation.origin() == Operation.Origin.PEER_RECOVERY || - operation.origin() == Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { - // ensure that replica operation has expected version type for replication - // ensure that versionTypeForReplicationAndRecovery is idempotent - assert operation.versionType() == operation.versionType().versionTypeForReplicationAndRecovery() - : "unexpected version type in request from [" + operation.origin().name() + "] " + - "found [" + operation.versionType().name() + "] " + - "expected [" + operation.versionType().versionTypeForReplicationAndRecovery().name() + "]"; - } + : "version: " + index.version() + " type: " + index.versionType(); return true; } @@ -812,7 +798,6 @@ public IndexResult index(Index index) throws IOException { try (ReleasableLock releasableLock = readLock.acquire()) { ensureOpen(); assert assertIncomingSequenceNumber(index.origin(), index.seqNo()); - assert assertVersionType(index); try (Releasable ignored = versionMap.acquireLock(index.uid().bytes()); Releasable indexThrottle = doThrottle ? 
() -> {} : throttle.acquireThrottle()) { lastWriteNanos = index.startTime(); @@ -911,9 +896,6 @@ protected final IndexingStrategy planIndexingAsNonPrimary(Index index) throws IO "max_seqno non-append-only [" + maxSeqNoOfNonAppendOnlyOperations.get() + "], seqno of index [" + index.seqNo() + "]"; } versionMap.enforceSafeAccess(); - // drop out of order operations - assert index.versionType().versionTypeForReplicationAndRecovery() == index.versionType() : - "resolving out of order delivery based on versioning but version type isn't fit for it. got [" + index.versionType() + "]"; // unlike the primary, replicas don't really care to about creation status of documents // this allows to ignore the case where a document was found in the live version maps in // a delete state and return false for the created flag in favor of code simplicity @@ -1178,7 +1160,6 @@ private void updateDocs(final Term uid, final List docs, public DeleteResult delete(Delete delete) throws IOException { versionMap.enforceSafeAccess(); assert Objects.equals(delete.uid().field(), IdFieldMapper.NAME) : delete.uid().field(); - assert assertVersionType(delete); assert assertIncomingSequenceNumber(delete.origin(), delete.seqNo()); final DeleteResult deleteResult; // NOTE: we don't throttle this when merges fall behind because delete-by-id does not create new segments: @@ -1235,10 +1216,6 @@ protected DeletionStrategy deletionStrategyForOperation(final Delete delete) thr protected final DeletionStrategy planDeletionAsNonPrimary(Delete delete) throws IOException { assertNonPrimaryOrigin(delete); - // drop out of order operations - assert delete.versionType().versionTypeForReplicationAndRecovery() == delete.versionType() : - "resolving out of order delivery based on versioning but version type isn't fit for it. got [" - + delete.versionType() + "]"; maxSeqNoOfNonAppendOnlyOperations.updateAndGet(curr -> Math.max(delete.seqNo(), curr)); assert maxSeqNoOfNonAppendOnlyOperations.get() >= delete.seqNo() : "max_seqno of non-append-only was not updated;" + "max_seqno non-append-only [" + maxSeqNoOfNonAppendOnlyOperations.get() + "], seqno of delete [" + delete.seqNo() + "]"; diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java index 6b0848e79b5a2..a39d763238205 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.core.internal.io.IOUtils; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MapperService; @@ -255,13 +254,13 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { final String type = fields.uid().type(); final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); if (isTombstone) { - op = new Translog.Delete(type, id, uid, seqNo, primaryTerm, version, VersionType.EXTERNAL); + op = new Translog.Delete(type, id, uid, seqNo, primaryTerm, version); assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]"; } else { final BytesReference source = fields.source(); // TODO: pass the latest timestamp from engine. 
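With `versionTypeForReplicationAndRecovery` removed, replication and recovery operations (including the translog operations rebuilt here) no longer carry a version type; the invariant is captured by the assertion added to `Engine.Index` and `Engine.Delete` above. A minimal standalone illustration of that invariant:

```java
enum Origin { PRIMARY, REPLICA, PEER_RECOVERY, LOCAL_TRANSLOG_RECOVERY }

public class VersionTypeInvariant {
    static boolean valid(Origin origin, Object versionType) {
        // mirrors: assert (origin == Origin.PRIMARY) == (versionType != null)
        return (origin == Origin.PRIMARY) == (versionType != null);
    }

    public static void main(String[] args) {
        System.out.println(valid(Origin.PRIMARY, "internal")); // true: primaries carry a version type
        System.out.println(valid(Origin.REPLICA, null));       // true: replicas rely on sequence numbers
        System.out.println(valid(Origin.REPLICA, "external")); // false: would trip the new assertion
    }
}
```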
final long autoGeneratedIdTimestamp = -1; - op = new Translog.Index(type, id, seqNo, primaryTerm, version, VersionType.EXTERNAL, + op = new Translog.Index(type, id, seqNo, primaryTerm, version, source == null ? null : source.toBytesRef().bytes, fields.routing(), autoGeneratedIdTimestamp); } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java index 6d888bd63e3ca..fedad6e134b46 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLoggerFactory; -import org.elasticsearch.script.ScriptModule; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; @@ -126,11 +125,8 @@ protected void resize(int newSize) { public long getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return 0L; + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); } return values[0]; } @@ -172,11 +168,8 @@ public Dates(SortedNumericDocValues in) { */ public ReadableDateTime getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return EPOCH; + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); } return get(0); } @@ -277,11 +270,8 @@ public SortedNumericDoubleValues getInternalValues() { public double getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return 0d; + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); } return values[0]; } @@ -337,11 +327,8 @@ protected void resize(int newSize) { public GeoPoint getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + + throw new IllegalStateException("A document doesn't have a value for a field! " + "Use doc[].size()==0 to check if a document is missing a field!"); - } - return null; } return values[0]; } @@ -454,11 +441,8 @@ protected void resize(int newSize) { public boolean getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return false; + throw new IllegalStateException("A document doesn't have a value for a field! 
" + + "Use doc[].size()==0 to check if a document is missing a field!"); } return values[0]; } @@ -544,11 +528,8 @@ public String get(int index) { public String getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + + throw new IllegalStateException("A document doesn't have a value for a field! " + "Use doc[].size()==0 to check if a document is missing a field!"); - } - return null; } return get(0); } @@ -572,11 +553,8 @@ public BytesRef get(int index) { public BytesRef getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + + throw new IllegalStateException("A document doesn't have a value for a field! " + "Use doc[].size()==0 to check if a document is missing a field!"); - } - return new BytesRef(); } return get(0); } diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java index bd1fd69eb7478..c5d5a688ed929 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java @@ -19,11 +19,8 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; -import org.elasticsearch.common.regex.Regex; import java.io.IOException; -import java.util.Collections; -import java.util.List; import java.util.Set; /** @@ -35,16 +32,10 @@ public class CustomFieldsVisitor extends FieldsVisitor { private final Set fields; - private final List patterns; - public CustomFieldsVisitor(Set fields, List patterns, boolean loadSource) { + public CustomFieldsVisitor(Set fields, boolean loadSource) { super(loadSource); this.fields = fields; - this.patterns = patterns; - } - - public CustomFieldsVisitor(Set fields, boolean loadSource) { - this(fields, Collections.emptyList(), loadSource); } @Override @@ -55,11 +46,6 @@ public Status needsField(FieldInfo fieldInfo) throws IOException { if (fields.contains(fieldInfo.name)) { return Status.YES; } - for (String pattern : patterns) { - if (Regex.simpleMatch(pattern, fieldInfo.name)) { - return Status.YES; - } - } return Status.NO; } } diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index a759f6a676714..5c5554cddcfc7 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -39,7 +39,7 @@ import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; @@ -202,7 +202,7 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[] if (gFields != null && gFields.length > 0) { for (String field : gFields) { - FieldMapper fieldMapper = docMapper.mappers().getMapper(field); + Mapper fieldMapper = docMapper.mappers().getMapper(field); if (fieldMapper == null) { if 
(docMapper.objectMappers().get(field) != null) { // Only fail if we know it is a object field, missing paths / fields shouldn't fail. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index 9193ca209ba23..f70c003846495 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -28,10 +28,10 @@ import java.util.Iterator; import java.util.Map; -public final class DocumentFieldMappers implements Iterable { +public final class DocumentFieldMappers implements Iterable { /** Full field name to mapper */ - private final Map fieldMappers; + private final Map fieldMappers; private final FieldNameAnalyzer indexAnalyzer; private final FieldNameAnalyzer searchAnalyzer; @@ -44,8 +44,12 @@ private static void put(Map analyzers, String key, Analyzer va analyzers.put(key, value); } - public DocumentFieldMappers(Collection mappers, Analyzer defaultIndex, Analyzer defaultSearch, Analyzer defaultSearchQuote) { - Map fieldMappers = new HashMap<>(); + public DocumentFieldMappers(Collection mappers, + Collection aliasMappers, + Analyzer defaultIndex, + Analyzer defaultSearch, + Analyzer defaultSearchQuote) { + Map fieldMappers = new HashMap<>(); Map indexAnalyzers = new HashMap<>(); Map searchAnalyzers = new HashMap<>(); Map searchQuoteAnalyzers = new HashMap<>(); @@ -56,14 +60,24 @@ public DocumentFieldMappers(Collection mappers, Analyzer defaultInd put(searchAnalyzers, fieldType.name(), fieldType.searchAnalyzer(), defaultSearch); put(searchQuoteAnalyzers, fieldType.name(), fieldType.searchQuoteAnalyzer(), defaultSearchQuote); } + + for (FieldAliasMapper aliasMapper : aliasMappers) { + fieldMappers.put(aliasMapper.name(), aliasMapper); + } + this.fieldMappers = Collections.unmodifiableMap(fieldMappers); this.indexAnalyzer = new FieldNameAnalyzer(indexAnalyzers); this.searchAnalyzer = new FieldNameAnalyzer(searchAnalyzers); this.searchQuoteAnalyzer = new FieldNameAnalyzer(searchQuoteAnalyzers); } - /** Returns the mapper for the given field */ - public FieldMapper getMapper(String field) { + /** + * Returns the leaf mapper associated with this field name. Note that the returned mapper + * could be either a concrete {@link FieldMapper}, or a {@link FieldAliasMapper}. + * + * To access a field's type information, {@link MapperService#fullName} should be used instead. 
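
Because getMapper now returns the base Mapper type, callers must distinguish concrete fields from aliases themselves. A sketch of the dispatch pattern this PR uses elsewhere (requireConcreteField is a hypothetical helper; FieldMapper and FieldAliasMapper are the subtypes involved):

    // Hypothetical helper showing the instanceof dispatch this forces on callers,
    // mirroring DocumentParser.parseObjectOrField further below.
    static FieldMapper requireConcreteField(Mapper mapper) {
        if (mapper instanceof FieldMapper) {
            return (FieldMapper) mapper;
        } else if (mapper instanceof FieldAliasMapper) {
            throw new IllegalArgumentException("Cannot write to a field alias [" + mapper.name() + "].");
        }
        throw new IllegalStateException("The provided mapper [" + mapper.name()
            + "] has an unrecognized type [" + mapper.getClass().getSimpleName() + "].");
    }
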
+ */ + public Mapper getMapper(String field) { return fieldMappers.get(field); } @@ -87,7 +101,7 @@ public Analyzer searchQuoteAnalyzer() { return this.searchQuoteAnalyzer; } - public Iterator iterator() { + public Iterator iterator() { return fieldMappers.values().iterator(); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index db1db658f620c..467ce2a4c1203 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -141,15 +141,18 @@ public DocumentMapper(MapperService mapperService, Mapping mapping) { // collect all the mappers for this type List newObjectMappers = new ArrayList<>(); List newFieldMappers = new ArrayList<>(); + List newFieldAliasMappers = new ArrayList<>(); for (MetadataFieldMapper metadataMapper : this.mapping.metadataMappers) { if (metadataMapper instanceof FieldMapper) { newFieldMappers.add(metadataMapper); } } - MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers); + MapperUtils.collect(this.mapping.root, + newObjectMappers, newFieldMappers, newFieldAliasMappers); final IndexAnalyzers indexAnalyzers = mapperService.getIndexAnalyzers(); this.fieldMappers = new DocumentFieldMappers(newFieldMappers, + newFieldAliasMappers, indexAnalyzers.getDefaultIndexAnalyzer(), indexAnalyzers.getDefaultSearchAnalyzer(), indexAnalyzers.getDefaultSearchQuoteAnalyzer()); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 18e92474498b3..85123f602edf8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -460,13 +460,18 @@ private static ParseContext nestedContext(ParseContext context, ObjectMapper map private static void parseObjectOrField(ParseContext context, Mapper mapper) throws IOException { if (mapper instanceof ObjectMapper) { parseObjectOrNested(context, (ObjectMapper) mapper); - } else { - FieldMapper fieldMapper = (FieldMapper)mapper; + } else if (mapper instanceof FieldMapper) { + FieldMapper fieldMapper = (FieldMapper) mapper; Mapper update = fieldMapper.parse(context); if (update != null) { context.addDynamicMapper(update); } parseCopyFields(context, fieldMapper.copyTo().copyToFields()); + } else if (mapper instanceof FieldAliasMapper) { + throw new IllegalArgumentException("Cannot write to a field alias [" + mapper.name() + "]."); + } else { + throw new IllegalStateException("The provided mapper [" + mapper.name() + "] has an unrecognized type [" + + mapper.getClass().getSimpleName() + "]."); } } @@ -828,9 +833,16 @@ private static void parseCopyFields(ParseContext context, List copyToFie /** Creates an copy of the current field with given field name and boost */ private static void parseCopy(String field, ParseContext context) throws IOException { - FieldMapper fieldMapper = context.docMapper().mappers().getMapper(field); - if (fieldMapper != null) { - fieldMapper.parse(context); + Mapper mapper = context.docMapper().mappers().getMapper(field); + if (mapper != null) { + if (mapper instanceof FieldMapper) { + ((FieldMapper) mapper).parse(context); + } else if (mapper instanceof FieldAliasMapper) { + throw new IllegalArgumentException("Cannot copy to a field alias [" + mapper.name() + "]."); + } else { + throw new 
IllegalStateException("The provided mapper [" + mapper.name() + + "] has an unrecognized type [" + mapper.getClass().getSimpleName() + "]."); + } } else { // The path of the dest field might be completely different from the current one so we need to reset it context = context.overridePath(new ContentPath(0)); @@ -838,8 +850,8 @@ private static void parseCopy(String field, ParseContext context) throws IOExcep final String[] paths = splitAndValidatePath(field); final String fieldName = paths[paths.length-1]; Tuple parentMapperTuple = getDynamicParentMapper(context, paths, null); - ObjectMapper mapper = parentMapperTuple.v2(); - parseDynamicValue(context, mapper, fieldName, context.parser().currentToken()); + ObjectMapper objectMapper = parentMapperTuple.v2(); + parseDynamicValue(context, objectMapper, fieldName, context.parser().currentToken()); for (int i = 0; i < parentMapperTuple.v1(); i++) { context.path().remove(); } @@ -850,47 +862,46 @@ private static Tuple getDynamicParentMapper(ParseContext ObjectMapper currentParent) { ObjectMapper mapper = currentParent == null ? context.root() : currentParent; int pathsAdded = 0; - ObjectMapper parent = mapper; - for (int i = 0; i < paths.length-1; i++) { - String currentPath = context.path().pathAsText(paths[i]); - FieldMapper existingFieldMapper = context.docMapper().mappers().getMapper(currentPath); - if (existingFieldMapper != null) { - throw new MapperParsingException( - "Could not dynamically add mapping for field [{}]. Existing mapping for [{}] must be of type object but found [{}].", - null, String.join(".", paths), currentPath, existingFieldMapper.fieldType.typeName()); - } - mapper = context.docMapper().objectMappers().get(currentPath); - if (mapper == null) { - // One mapping is missing, check if we are allowed to create a dynamic one. - ObjectMapper.Dynamic dynamic = dynamicOrDefault(parent, context); - - switch (dynamic) { - case STRICT: - throw new StrictDynamicMappingException(parent.fullPath(), paths[i]); - case TRUE: - Mapper.Builder builder = context.root().findTemplateBuilder(context, paths[i], XContentFieldType.OBJECT); - if (builder == null) { - builder = new ObjectMapper.Builder(paths[i]).enabled(true); - } - Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings().getSettings(), - context.path()); - mapper = (ObjectMapper) builder.build(builderContext); - if (mapper.nested() != ObjectMapper.Nested.NO) { - throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) - + "]) through `copy_to` or dots in field names"); - } - context.addDynamicMapper(mapper); - break; - case FALSE: - // Should not dynamically create any more mappers so return the last mapper - return new Tuple<>(pathsAdded, parent); + ObjectMapper parent = mapper; + for (int i = 0; i < paths.length-1; i++) { + String currentPath = context.path().pathAsText(paths[i]); + Mapper existingFieldMapper = context.docMapper().mappers().getMapper(currentPath); + if (existingFieldMapper != null) { + throw new MapperParsingException( + "Could not dynamically add mapping for field [{}]. Existing mapping for [{}] must be of type object but found [{}].", + null, String.join(".", paths), currentPath, existingFieldMapper.typeName()); + } + mapper = context.docMapper().objectMappers().get(currentPath); + if (mapper == null) { + // One mapping is missing, check if we are allowed to create a dynamic one. 
+ ObjectMapper.Dynamic dynamic = dynamicOrDefault(parent, context); + + switch (dynamic) { + case STRICT: + throw new StrictDynamicMappingException(parent.fullPath(), paths[i]); + case TRUE: + Mapper.Builder builder = context.root().findTemplateBuilder(context, paths[i], XContentFieldType.OBJECT); + if (builder == null) { + builder = new ObjectMapper.Builder(paths[i]).enabled(true); + } + Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings().getSettings(), + context.path()); mapper = (ObjectMapper) builder.build(builderContext); + if (mapper.nested() != ObjectMapper.Nested.NO) { + throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + + "]) through `copy_to` or dots in field names"); + } + context.addDynamicMapper(mapper); + break; + case FALSE: + // Should not dynamically create any more mappers so return the last mapper + return new Tuple<>(pathsAdded, parent); - } } - context.path().add(paths[i]); - pathsAdded++; - parent = mapper; } + context.path().add(paths[i]); + pathsAdded++; + parent = mapper; + } return new Tuple<>(pathsAdded, mapper); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java new file mode 100644 index 0000000000000..8d87b4f73ec43 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java @@ -0,0 +1,132 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; + +import java.io.IOException; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; + +/** + * A mapper for field aliases. + * + * A field alias has no concrete field mappings of its own, but instead points to another field by + * its path. Once defined, an alias can be used in place of the concrete field name in search requests. 
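
For orientation, a mapping that exercises the new mapper could look like the following sketch (the field names are invented; the TypeParser below accepts exactly a type of alias plus a mandatory path):

    // Illustrative mapping: "route_length_miles" is an alias that searches
    // resolve to the concrete "distance" field.
    String mappingWithAlias =
          "{\n"
        + "  \"properties\": {\n"
        + "    \"distance\":           { \"type\": \"long\" },\n"
        + "    \"route_length_miles\": { \"type\": \"alias\", \"path\": \"distance\" }\n"
        + "  }\n"
        + "}";
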
+ */ +public final class FieldAliasMapper extends Mapper { + public static final String CONTENT_TYPE = "alias"; + + public static class Names { + public static final String PATH = "path"; + } + + private final String name; + private final String path; + + public FieldAliasMapper(String simpleName, + String name, + String path) { + super(simpleName); + this.name = name; + this.path = path; + } + + @Override + public String name() { + return name; + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + public String path() { + return path; + } + + @Override + public Mapper merge(Mapper mergeWith) { + if (!(mergeWith instanceof FieldAliasMapper)) { + throw new IllegalArgumentException("Cannot merge a field alias mapping [" + + name() + "] with a mapping that is not for a field alias."); + } + return mergeWith; + } + + @Override + public Mapper updateFieldType(Map fullNameToFieldType) { + return this; + } + + @Override + public Iterator iterator() { + return Collections.emptyIterator(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject(simpleName()) + .field("type", CONTENT_TYPE) + .field(Names.PATH, path) + .endObject(); + } + + public static class TypeParser implements Mapper.TypeParser { + @Override + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { + FieldAliasMapper.Builder builder = new FieldAliasMapper.Builder(name); + Object pathField = node.remove(Names.PATH); + String path = XContentMapValues.nodeStringValue(pathField, null); + if (path == null) { + throw new MapperParsingException("The [path] property must be specified for field [" + name + "]."); + } + return builder.path(path); + } + } + + public static class Builder extends Mapper.Builder { + private String name; + private String path; + + protected Builder(String name) { + super(name); + this.name = name; + } + + public String name() { + return this.name; + } + + public Builder path(String path) { + this.path = path; + return this; + } + + public FieldAliasMapper build(BuilderContext context) { + String fullName = context.path().pathAsText(name); + return new FieldAliasMapper(name, fullName, path); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 977b930c41e5b..cbb008c9d0051 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -247,6 +247,11 @@ public String name() { return fieldType().name(); } + @Override + public String typeName() { + return fieldType.typeName(); + } + public MappedFieldType fieldType() { return fieldType; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index 069468ddb7a25..c7d92e9f829aa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -35,64 +35,115 @@ */ class FieldTypeLookup implements Iterable { - /** Full field name to field type */ final CopyOnWriteHashMap fullNameToFieldType; + private final CopyOnWriteHashMap aliasToConcreteName; - /** Create a new empty instance. 
*/ FieldTypeLookup() { fullNameToFieldType = new CopyOnWriteHashMap<>(); + aliasToConcreteName = new CopyOnWriteHashMap<>(); } - private FieldTypeLookup(CopyOnWriteHashMap fullName) { - this.fullNameToFieldType = fullName; + private FieldTypeLookup(CopyOnWriteHashMap fullNameToFieldType, + CopyOnWriteHashMap aliasToConcreteName) { + this.fullNameToFieldType = fullNameToFieldType; + this.aliasToConcreteName = aliasToConcreteName; } /** * Return a new instance that contains the union of this instance and the field types - * from the provided fields. If a field already exists, the field type will be updated - * to use the new mappers field type. + * from the provided mappers. If a field already exists, its field type will be updated + * to use the new type from the given field mapper. Similarly if an alias already + * exists, it will be updated to reference the field type from the new mapper. */ - public FieldTypeLookup copyAndAddAll(String type, Collection fieldMappers) { + public FieldTypeLookup copyAndAddAll(String type, + Collection fieldMappers, + Collection fieldAliasMappers) { Objects.requireNonNull(type, "type must not be null"); if (MapperService.DEFAULT_MAPPING.equals(type)) { throw new IllegalArgumentException("Default mappings should not be added to the lookup"); } CopyOnWriteHashMap fullName = this.fullNameToFieldType; + CopyOnWriteHashMap aliases = this.aliasToConcreteName; for (FieldMapper fieldMapper : fieldMappers) { MappedFieldType fieldType = fieldMapper.fieldType(); MappedFieldType fullNameFieldType = fullName.get(fieldType.name()); - if (fullNameFieldType == null) { - // introduction of a new field - fullName = fullName.copyAndPut(fieldType.name(), fieldMapper.fieldType()); - } else { - // modification of an existing field - checkCompatibility(fullNameFieldType, fieldType); - if (fieldType.equals(fullNameFieldType) == false) { - fullName = fullName.copyAndPut(fieldType.name(), fieldMapper.fieldType()); - } + if (!Objects.equals(fieldType, fullNameFieldType)) { + validateField(fullNameFieldType, fieldType, aliases); + fullName = fullName.copyAndPut(fieldType.name(), fieldType); + } + } + + for (FieldAliasMapper fieldAliasMapper : fieldAliasMappers) { + String aliasName = fieldAliasMapper.name(); + String path = fieldAliasMapper.path(); + + validateAlias(aliasName, path, aliases, fullName); + aliases = aliases.copyAndPut(aliasName, path); + } + + return new FieldTypeLookup(fullName, aliases); + } + + /** + * Checks that the new field type is valid. + */ + private void validateField(MappedFieldType existingFieldType, + MappedFieldType newFieldType, + CopyOnWriteHashMap aliasToConcreteName) { + String fieldName = newFieldType.name(); + if (aliasToConcreteName.containsKey(fieldName)) { + throw new IllegalArgumentException("The name for field [" + fieldName + "] has already" + + " been used to define a field alias."); + } + + if (existingFieldType != null) { + List conflicts = new ArrayList<>(); + existingFieldType.checkCompatibility(newFieldType, conflicts); + if (conflicts.isEmpty() == false) { + throw new IllegalArgumentException("Mapper for [" + fieldName + + "] conflicts with existing mapping:\n" + conflicts.toString()); } } - return new FieldTypeLookup(fullName); } /** - * Checks if the given field type is compatible with an existing field type. - * An IllegalArgumentException is thrown in case of incompatibility. + * Checks that the new field alias is valid. 
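
Once aliases are registered, resolution is a two-step map lookup, as the reworked get method below shows. A runnable plain-Java reduction (AliasAwareLookup is hypothetical; the generic value stands in for MappedFieldType):

    import java.util.HashMap;
    import java.util.Map;

    // Reduction of the alias-aware lookup: translate an alias to its concrete
    // field name first, then resolve the field type under that name.
    class AliasAwareLookup<T> {
        private final Map<String, String> aliasToConcreteName = new HashMap<>();
        private final Map<String, T> fullNameToFieldType = new HashMap<>();

        void putField(String name, T fieldType) { fullNameToFieldType.put(name, fieldType); }

        void putAlias(String aliasName, String path) { aliasToConcreteName.put(aliasName, path); }

        T get(String field) {
            String concreteField = aliasToConcreteName.getOrDefault(field, field);
            return fullNameToFieldType.get(concreteField);
        }
    }
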
+ * + * Note that this method assumes that new concrete fields have already been processed, so that it + * can verify that an alias refers to an existing concrete field. */ - private void checkCompatibility(MappedFieldType existingFieldType, MappedFieldType newFieldType) { - List conflicts = new ArrayList<>(); - existingFieldType.checkCompatibility(newFieldType, conflicts); - if (conflicts.isEmpty() == false) { - throw new IllegalArgumentException("Mapper for [" + newFieldType.name() + "] conflicts with existing mapping:\n" + conflicts.toString()); + private void validateAlias(String aliasName, + String path, + CopyOnWriteHashMap aliasToConcreteName, + CopyOnWriteHashMap fullNameToFieldType) { + if (fullNameToFieldType.containsKey(aliasName)) { + throw new IllegalArgumentException("The name for field alias [" + aliasName + "] has already" + + " been used to define a concrete field."); + } + + if (path.equals(aliasName)) { + throw new IllegalArgumentException("Invalid [path] value [" + path + "] for field alias [" + + aliasName + "]: an alias cannot refer to itself."); + } + + if (aliasToConcreteName.containsKey(path)) { + throw new IllegalArgumentException("Invalid [path] value [" + path + "] for field alias [" + + aliasName + "]: an alias cannot refer to another alias."); + } + + if (!fullNameToFieldType.containsKey(path)) { + throw new IllegalArgumentException("Invalid [path] value [" + path + "] for field alias [" + + aliasName + "]: an alias must refer to an existing field in the mappings."); } } /** Returns the field for the given field */ public MappedFieldType get(String field) { - return fullNameToFieldType.get(field); + String concreteField = aliasToConcreteName.getOrDefault(field, field); + return fullNameToFieldType.get(concreteField); } /** @@ -105,6 +156,11 @@ public Collection simpleMatchToFullName(String pattern) { fields.add(fieldType.name()); } } + for (String aliasName : aliasToConcreteName.keySet()) { + if (Regex.simpleMatch(pattern, aliasName)) { + fields.add(aliasName); + } + } return fields; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java index 051ac9da7f2ec..4d17afae614b8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -173,6 +173,11 @@ public final String simpleName() { /** Returns the canonical name which uniquely identifies the mapper against other mappers in a type. */ public abstract String name(); + /** + * Returns a name representing the the type of this mapper. + */ + public abstract String typeName(); + /** Return the merge of {@code mergeWith} into this. * Both {@code this} and {@code mergeWith} will be left unmodified. */ public abstract Mapper merge(Mapper mergeWith); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java new file mode 100644 index 0000000000000..440be98ad9ec9 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java @@ -0,0 +1,214 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Stream; + +/** + * A utility class that helps validate certain aspects of a mappings update. + */ +class MapperMergeValidator { + + /** + * Validates the overall structure of the mapping addition, including whether + * duplicate fields are present, and if the provided fields have already been + * defined with a different data type. + * + * @param type The mapping type, for use in error messages. + * @param objectMappers The newly added object mappers. + * @param fieldMappers The newly added field mappers. + * @param fieldAliasMappers The newly added field alias mappers. + * @param fullPathObjectMappers All object mappers, indexed by their full path. + * @param fieldTypes All field and field alias mappers, collected into a lookup structure. + */ + public static void validateMapperStructure(String type, + Collection objectMappers, + Collection fieldMappers, + Collection fieldAliasMappers, + Map fullPathObjectMappers, + FieldTypeLookup fieldTypes) { + checkFieldUniqueness(type, objectMappers, fieldMappers, + fieldAliasMappers, fullPathObjectMappers, fieldTypes); + checkObjectsCompatibility(objectMappers, fullPathObjectMappers); + } + + private static void checkFieldUniqueness(String type, + Collection objectMappers, + Collection fieldMappers, + Collection fieldAliasMappers, + Map fullPathObjectMappers, + FieldTypeLookup fieldTypes) { + + // first check within mapping + Set objectFullNames = new HashSet<>(); + for (ObjectMapper objectMapper : objectMappers) { + String fullPath = objectMapper.fullPath(); + if (objectFullNames.add(fullPath) == false) { + throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice in mapping for type [" + type + "]"); + } + } + + Set fieldNames = new HashSet<>(); + Stream.concat(fieldMappers.stream(), fieldAliasMappers.stream()) + .forEach(mapper -> { + String name = mapper.name(); + if (objectFullNames.contains(name)) { + throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field in [" + type + "]"); + } else if (fieldNames.add(name) == false) { + throw new IllegalArgumentException("Field [" + name + "] is defined twice in [" + type + "]"); + } + }); + + // then check other types + for (String fieldName : fieldNames) { + if (fullPathObjectMappers.containsKey(fieldName)) { + throw new IllegalArgumentException("[" + fieldName + "] is defined as a field in mapping [" + type + + "] but this name is already used for an object in other types"); + } + } + + for (String objectPath : objectFullNames) { + if (fieldTypes.get(objectPath) != null) { + throw new IllegalArgumentException("[" + objectPath + "] is defined as an object in mapping [" + type + + "] but this name is already used for a field 
in other types"); + } + } + } + + private static void checkObjectsCompatibility(Collection objectMappers, + Map fullPathObjectMappers) { + for (ObjectMapper newObjectMapper : objectMappers) { + ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath()); + if (existingObjectMapper != null) { + // simulate a merge and ignore the result, we are just interested + // in exceptions here + existingObjectMapper.merge(newObjectMapper); + } + } + } + + /** + * Verifies that each field reference, e.g. the value of copy_to or the target + * of a field alias, corresponds to a valid part of the mapping. + * + * @param fieldMappers The newly added field mappers. + * @param fieldAliasMappers The newly added field alias mappers. + * @param fullPathObjectMappers All object mappers, indexed by their full path. + * @param fieldTypes All field and field alias mappers, collected into a lookup structure. + */ + public static void validateFieldReferences(List fieldMappers, + List fieldAliasMappers, + Map fullPathObjectMappers, + FieldTypeLookup fieldTypes) { + validateCopyTo(fieldMappers, fullPathObjectMappers, fieldTypes); + validateFieldAliasTargets(fieldAliasMappers, fullPathObjectMappers); + } + + private static void validateCopyTo(List fieldMappers, + Map fullPathObjectMappers, + FieldTypeLookup fieldTypes) { + for (FieldMapper mapper : fieldMappers) { + if (mapper.copyTo() != null && mapper.copyTo().copyToFields().isEmpty() == false) { + String sourceParent = parentObject(mapper.name()); + if (sourceParent != null && fieldTypes.get(sourceParent) != null) { + throw new IllegalArgumentException("[copy_to] may not be used to copy from a multi-field: [" + mapper.name() + "]"); + } + + final String sourceScope = getNestedScope(mapper.name(), fullPathObjectMappers); + for (String copyTo : mapper.copyTo().copyToFields()) { + String copyToParent = parentObject(copyTo); + if (copyToParent != null && fieldTypes.get(copyToParent) != null) { + throw new IllegalArgumentException("[copy_to] may not be used to copy to a multi-field: [" + copyTo + "]"); + } + + if (fullPathObjectMappers.containsKey(copyTo)) { + throw new IllegalArgumentException("Cannot copy to field [" + copyTo + "] since it is mapped as an object"); + } + + final String targetScope = getNestedScope(copyTo, fullPathObjectMappers); + checkNestedScopeCompatibility(sourceScope, targetScope); + } + } + } + } + + private static void validateFieldAliasTargets(List fieldAliasMappers, + Map fullPathObjectMappers) { + for (FieldAliasMapper mapper : fieldAliasMappers) { + String aliasName = mapper.name(); + String path = mapper.path(); + + String aliasScope = getNestedScope(aliasName, fullPathObjectMappers); + String pathScope = getNestedScope(path, fullPathObjectMappers); + + if (!Objects.equals(aliasScope, pathScope)) { + StringBuilder message = new StringBuilder("Invalid [path] value [" + path + "] for field alias [" + + aliasName + "]: an alias must have the same nested scope as its target. "); + message.append(aliasScope == null + ? "The alias is not nested" + : "The alias's nested scope is [" + aliasScope + "]"); + message.append(", but "); + message.append(pathScope == null + ? "the target is not nested." 
+ : "the target's nested scope is [" + pathScope + "]."); + throw new IllegalArgumentException(message.toString()); + } + } + } + + private static String getNestedScope(String path, Map fullPathObjectMappers) { + for (String parentPath = parentObject(path); parentPath != null; parentPath = parentObject(parentPath)) { + ObjectMapper objectMapper = fullPathObjectMappers.get(parentPath); + if (objectMapper != null && objectMapper.nested().isNested()) { + return parentPath; + } + } + return null; + } + + private static void checkNestedScopeCompatibility(String source, String target) { + boolean targetIsParentOfSource; + if (source == null || target == null) { + targetIsParentOfSource = target == null; + } else { + targetIsParentOfSource = source.equals(target) || source.startsWith(target + "."); + } + if (targetIsParentOfSource == false) { + throw new IllegalArgumentException( + "Illegal combination of [copy_to] and [nested] mappings: [copy_to] may only copy data to the current nested " + + "document or any of its parents, however one [copy_to] directive is trying to copy data from nested object [" + + source + "] to [" + target + "]"); + } + } + + private static String parentObject(String field) { + int lastDot = field.lastIndexOf('.'); + if (lastDot == -1) { + return null; + } + return field.substring(0, lastDot); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 8988238d9277e..936e733400255 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -21,7 +21,6 @@ import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; @@ -395,15 +394,17 @@ private synchronized Map internalMerge(@Nullable Documen // check basic sanity of the new mapping List objectMappers = new ArrayList<>(); List fieldMappers = new ArrayList<>(); + List fieldAliasMappers = new ArrayList<>(); Collections.addAll(fieldMappers, newMapper.mapping().metadataMappers); - MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers); - checkFieldUniqueness(newMapper.type(), objectMappers, fieldMappers, fullPathObjectMappers, fieldTypes); - checkObjectsCompatibility(objectMappers, fullPathObjectMappers); + MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers, fieldAliasMappers); + + MapperMergeValidator.validateMapperStructure(newMapper.type(), objectMappers, fieldMappers, + fieldAliasMappers, fullPathObjectMappers, fieldTypes); checkPartitionedIndexConstraints(newMapper); // update lookup data-structures // this will in particular make sure that the merged fields are compatible with other types - fieldTypes = fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers); + fieldTypes = fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers, fieldAliasMappers); for (ObjectMapper objectMapper : objectMappers) { if (fullPathObjectMappers == this.fullPathObjectMappers) { @@ -417,7 +418,8 @@ private synchronized Map internalMerge(@Nullable Documen } } - validateCopyTo(fieldMappers, fullPathObjectMappers, fieldTypes); + MapperMergeValidator.validateFieldReferences(fieldMappers, fieldAliasMappers, + fullPathObjectMappers, fieldTypes); if (reason == MergeReason.MAPPING_UPDATE) { // 
this check will only be performed on the master node when there is @@ -482,7 +484,7 @@ private boolean assertMappersShareSameFieldType() { if (mapper != null) { List fieldMappers = new ArrayList<>(); Collections.addAll(fieldMappers, mapper.mapping().metadataMappers); - MapperUtils.collect(mapper.root(), new ArrayList<>(), fieldMappers); + MapperUtils.collect(mapper.root(), new ArrayList<>(), fieldMappers, new ArrayList<>()); for (FieldMapper fieldMapper : fieldMappers) { assert fieldMapper.fieldType() == fieldTypes.get(fieldMapper.name()) : fieldMapper.name(); } @@ -503,56 +505,6 @@ private boolean assertSerialization(DocumentMapper mapper) { return true; } - private static void checkFieldUniqueness(String type, Collection objectMappers, Collection fieldMappers, - Map fullPathObjectMappers, FieldTypeLookup fieldTypes) { - - // first check within mapping - final Set objectFullNames = new HashSet<>(); - for (ObjectMapper objectMapper : objectMappers) { - final String fullPath = objectMapper.fullPath(); - if (objectFullNames.add(fullPath) == false) { - throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice in mapping for type [" + type + "]"); - } - } - - final Set fieldNames = new HashSet<>(); - for (FieldMapper fieldMapper : fieldMappers) { - final String name = fieldMapper.name(); - if (objectFullNames.contains(name)) { - throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field in [" + type + "]"); - } else if (fieldNames.add(name) == false) { - throw new IllegalArgumentException("Field [" + name + "] is defined twice in [" + type + "]"); - } - } - - // then check other types - for (String fieldName : fieldNames) { - if (fullPathObjectMappers.containsKey(fieldName)) { - throw new IllegalArgumentException("[" + fieldName + "] is defined as a field in mapping [" + type - + "] but this name is already used for an object in other types"); - } - } - - for (String objectPath : objectFullNames) { - if (fieldTypes.get(objectPath) != null) { - throw new IllegalArgumentException("[" + objectPath + "] is defined as an object in mapping [" + type - + "] but this name is already used for a field in other types"); - } - } - } - - private static void checkObjectsCompatibility(Collection objectMappers, - Map fullPathObjectMappers) { - for (ObjectMapper newObjectMapper : objectMappers) { - ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath()); - if (existingObjectMapper != null) { - // simulate a merge and ignore the result, we are just interested - // in exceptions here - existingObjectMapper.merge(newObjectMapper); - } - } - } - private void checkNestedFieldsLimit(Map fullPathObjectMappers) { long allowedNestedFields = indexSettings.getValue(INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING); long actualNestedFields = 0; @@ -609,66 +561,6 @@ private static void checkIndexSortCompatibility(IndexSortConfig sortConfig, bool } } - private static void validateCopyTo(List fieldMappers, Map fullPathObjectMappers, - FieldTypeLookup fieldTypes) { - for (FieldMapper mapper : fieldMappers) { - if (mapper.copyTo() != null && mapper.copyTo().copyToFields().isEmpty() == false) { - String sourceParent = parentObject(mapper.name()); - if (sourceParent != null && fieldTypes.get(sourceParent) != null) { - throw new IllegalArgumentException("[copy_to] may not be used to copy from a multi-field: [" + mapper.name() + "]"); - } - - final String sourceScope = getNestedScope(mapper.name(), fullPathObjectMappers); - for (String 
copyTo : mapper.copyTo().copyToFields()) { - String copyToParent = parentObject(copyTo); - if (copyToParent != null && fieldTypes.get(copyToParent) != null) { - throw new IllegalArgumentException("[copy_to] may not be used to copy to a multi-field: [" + copyTo + "]"); - } - - if (fullPathObjectMappers.containsKey(copyTo)) { - throw new IllegalArgumentException("Cannot copy to field [" + copyTo + "] since it is mapped as an object"); - } - - final String targetScope = getNestedScope(copyTo, fullPathObjectMappers); - checkNestedScopeCompatibility(sourceScope, targetScope); - } - } - } - } - - private static String getNestedScope(String path, Map fullPathObjectMappers) { - for (String parentPath = parentObject(path); parentPath != null; parentPath = parentObject(parentPath)) { - ObjectMapper objectMapper = fullPathObjectMappers.get(parentPath); - if (objectMapper != null && objectMapper.nested().isNested()) { - return parentPath; - } - } - return null; - } - - private static void checkNestedScopeCompatibility(String source, String target) { - boolean targetIsParentOfSource; - if (source == null || target == null) { - targetIsParentOfSource = target == null; - } else { - targetIsParentOfSource = source.equals(target) || source.startsWith(target + "."); - } - if (targetIsParentOfSource == false) { - throw new IllegalArgumentException( - "Illegal combination of [copy_to] and [nested] mappings: [copy_to] may only copy data to the current nested " + - "document or any of its parents, however one [copy_to] directive is trying to copy data from nested object [" + - source + "] to [" + target + "]"); - } - } - - private static String parentObject(String field) { - int lastDot = field.lastIndexOf('.'); - if (lastDot == -1) { - return null; - } - return field.substring(0, lastDot); - } - public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { return documentParser.parse(mappingType, mappingSource, applyDefault ? defaultMappingSource : null); } @@ -729,6 +621,13 @@ public Collection simpleMatchToFullName(String pattern) { return fieldTypes.simpleMatchToFullName(pattern); } + /** + * Returns all mapped field types. + */ + public Iterable fieldTypes() { + return fieldTypes; + } + public ObjectMapper getObjectMapper(String name) { return fullPathObjectMappers.get(name); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java index ad57d72b345ab..70da6b73f312e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java @@ -24,17 +24,28 @@ enum MapperUtils { ; - /** Split mapper and its descendants into object and field mappers. */ - public static void collect(Mapper mapper, Collection objectMappers, Collection fieldMappers) { + /** + * Splits the provided mapper and its descendants into object, field, and field alias mappers. 
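
The copy_to and alias-target validation that moves out of MapperService here rests on one primitive: walking up the dotted path to the nearest nested ancestor. A runnable plain-Java reduction (NestedScopeSketch and the path set are stand-ins for the validator's parameters):

    import java.util.Set;

    // Reduction of getNestedScope/parentObject: strip the last dotted segment
    // until an ancestor known to be a nested object (if any) is reached.
    class NestedScopeSketch {
        static String parentObject(String field) {
            int lastDot = field.lastIndexOf('.');
            return lastDot == -1 ? null : field.substring(0, lastDot);
        }

        static String getNestedScope(String path, Set<String> nestedObjectPaths) {
            for (String parent = parentObject(path); parent != null; parent = parentObject(parent)) {
                if (nestedObjectPaths.contains(parent)) {
                    return parent;
                }
            }
            return null;
        }
    }
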
+ */ + public static void collect(Mapper mapper, Collection objectMappers, + Collection fieldMappers, + Collection fieldAliasMappers) { if (mapper instanceof RootObjectMapper) { // root mapper isn't really an object mapper } else if (mapper instanceof ObjectMapper) { objectMappers.add((ObjectMapper)mapper); } else if (mapper instanceof FieldMapper) { fieldMappers.add((FieldMapper)mapper); + } else if (mapper instanceof FieldAliasMapper) { + fieldAliasMappers.add((FieldAliasMapper) mapper); + } else { + throw new IllegalStateException("Unrecognized mapper type [" + + mapper.getClass().getSimpleName() + "]."); } + + for (Mapper child : mapper) { - collect(child, objectMappers, fieldMappers); + collect(child, objectMappers, fieldMappers, fieldAliasMappers); } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index c3e3e41798d91..99ad393670036 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -359,6 +359,11 @@ public String name() { return this.fullPath; } + @Override + public String typeName() { + return CONTENT_TYPE; + } + public boolean isEnabled() { return this.enabled; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java index ffb548fd0f10f..71bd2e93d3039 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java @@ -35,6 +35,8 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -90,6 +92,8 @@ public MetadataFieldMapper getDefault(MappedFieldType fieldType, ParserContext c static final class TypeFieldType extends StringFieldType { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(ESLoggerFactory.getLogger(TypeFieldType.class)); + TypeFieldType() { } @@ -154,6 +158,29 @@ public Query termsQuery(List values, QueryShardContext context) { } } + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("range_single_type", + "Running [range] query on [_type] field for an index with a single type. 
As types are deprecated, this functionality will be removed in future releases."); + Query result = new MatchAllDocsQuery(); + String type = context.getMapperService().documentMapper().type(); + if (type != null) { + BytesRef typeBytes = new BytesRef(type); + if (lowerTerm != null) { + int comp = indexedValueForSearch(lowerTerm).compareTo(typeBytes); + if (comp > 0 || (comp == 0 && includeLower == false)) { + result = new MatchNoDocsQuery("[_type] was lexicographically smaller than lower bound of range"); + } + } + if (upperTerm != null) { + int comp = indexedValueForSearch(upperTerm).compareTo(typeBytes); + if (comp < 0 || (comp == 0 && includeUpper == false)) { + result = new MatchNoDocsQuery("[_type] was lexicographically greater than upper bound of range"); + } + } + } + return result; + } } /** diff --git a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java index 280df7cfa6ad8..7a2373e5ad8b5 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java @@ -149,7 +149,7 @@ public static Query newFilter(QueryShardContext context, String fieldPattern) { } if (context.indexVersionCreated().before(Version.V_6_1_0)) { - return newLegacyExistsQuery(fields); + return newLegacyExistsQuery(context, fields); } if (fields.size() == 1) { @@ -164,22 +164,28 @@ public static Query newFilter(QueryShardContext context, String fieldPattern) { return new ConstantScoreQuery(boolFilterBuilder.build()); } - private static Query newLegacyExistsQuery(Collection fields) { + private static Query newLegacyExistsQuery(QueryShardContext context, Collection fields) { // We create TermsQuery directly here rather than using FieldNamesFieldType.termsQuery() // so we don't end up with deprecation warnings if (fields.size() == 1) { - Query filter = new TermQuery(new Term(FieldNamesFieldMapper.NAME, fields.iterator().next())); + Query filter = newLegacyExistsQuery(context, fields.iterator().next()); return new ConstantScoreQuery(filter); } BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder(); for (String field : fields) { - Query filter = new TermQuery(new Term(FieldNamesFieldMapper.NAME, field)); + Query filter = newLegacyExistsQuery(context, field); boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD); } return new ConstantScoreQuery(boolFilterBuilder.build()); } + private static Query newLegacyExistsQuery(QueryShardContext context, String field) { + MappedFieldType fieldType = context.fieldMapper(field); + String fieldName = fieldType != null ? 
fieldType.name() : field; + return new TermQuery(new Term(FieldNamesFieldMapper.NAME, fieldName)); + } + private static Query newFieldExistsQuery(QueryShardContext context, String field) { MappedFieldType fieldType = context.getMapperService().fullName(field); if (fieldType == null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 1f410a2564cdf..637d93212912f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; @@ -202,14 +203,18 @@ protected Query doToQuery(QueryShardContext context) throws IOException { multiTermQueryBuilder.getClass().getName() + ", should be " + MultiTermQuery.class.getName() + " but was " + subQuery.getClass().getName()); } + + PrefixQueryBuilder prefixBuilder = (PrefixQueryBuilder) multiTermQueryBuilder; + MappedFieldType fieldType = context.fieldMapper(prefixBuilder.fieldName()); + String fieldName = fieldType != null ? fieldType.name() : prefixBuilder.fieldName(); + if (context.getIndexSettings().getIndexVersionCreated().before(Version.V_6_4_0)) { /** * Indices created in this version do not index positions on the prefix field * so we cannot use it to match positional queries. Instead, we explicitly create the prefix * query on the main field to avoid the rewrite. */ - PrefixQueryBuilder prefixBuilder = (PrefixQueryBuilder) multiTermQueryBuilder; - PrefixQuery prefixQuery = new PrefixQuery(new Term(prefixBuilder.fieldName(), prefixBuilder.value())); + PrefixQuery prefixQuery = new PrefixQuery(new Term(fieldName, prefixBuilder.value())); if (prefixBuilder.rewrite() != null) { MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(prefixBuilder.rewrite(), null, LoggingDeprecationHandler.INSTANCE); @@ -218,15 +223,14 @@ protected Query doToQuery(QueryShardContext context) throws IOException { subQuery = prefixQuery; spanQuery = new SpanMultiTermQueryWrapper<>(prefixQuery); } else { - String origFieldName = ((PrefixQueryBuilder) multiTermQueryBuilder).fieldName(); - SpanTermQuery spanTermQuery = new SpanTermQuery(((TermQuery) subQuery).getTerm()); /** * Prefixes are indexed in a different field so we mask the term query with the original field * name. This is required because span_near and span_or queries don't work across different field. * The masking is safe because the prefix field is indexed using the same content than the original field * and the prefix analyzer preserves positions. 
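
A pattern repeats across these query builders: resolve the requested name through the mapping and use the concrete field's name, falling back to the requested name when the field is unmapped. Reduced to plain Java (FieldNameResolver is hypothetical; the map stands in for QueryShardContext#fieldMapper):

    import java.util.Map;

    // Reduction of the queryFieldName/newLegacyExistsQuery pattern: aliases
    // resolve to the concrete field name, unmapped names pass through unchanged.
    class FieldNameResolver {
        static String resolveQueryFieldName(Map<String, String> concreteNameByRequestedName, String requested) {
            String concrete = concreteNameByRequestedName.get(requested);
            return concrete != null ? concrete : requested;
        }
    }
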
*/ - spanQuery = new FieldMaskingSpanQuery(spanTermQuery, origFieldName); + SpanTermQuery spanTermQuery = new SpanTermQuery(((TermQuery) subQuery).getTerm()); + spanQuery = new FieldMaskingSpanQuery(spanTermQuery, fieldName); } } else { if (subQuery instanceof MultiTermQuery == false) { diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java index d4333fa0bc5f0..ceeef6112ae46 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; import java.util.ArrayList; @@ -218,7 +219,8 @@ protected Query doToQuery(QueryShardContext context) throws IOException { } String spanNearFieldName = null; if (isGap) { - spanNearFieldName = ((SpanGapQueryBuilder) queryBuilder).fieldName(); + String fieldName = ((SpanGapQueryBuilder) queryBuilder).fieldName(); + spanNearFieldName = queryFieldName(context, fieldName); } else { spanNearFieldName = ((SpanQuery) query).getField(); } @@ -241,7 +243,9 @@ protected Query doToQuery(QueryShardContext context) throws IOException { isGap = queryBuilder instanceof SpanGapQueryBuilder; if (isGap) { String fieldName = ((SpanGapQueryBuilder) queryBuilder).fieldName(); - if (!spanNearFieldName.equals(fieldName)) { + String spanGapFieldName = queryFieldName(context, fieldName); + + if (!spanNearFieldName.equals(spanGapFieldName)) { throw new IllegalArgumentException("[span_near] clauses must have same field"); } int gap = ((SpanGapQueryBuilder) queryBuilder).width(); @@ -255,6 +259,11 @@ protected Query doToQuery(QueryShardContext context) throws IOException { return builder.build(); } + private String queryFieldName(QueryShardContext context, String fieldName) { + MappedFieldType fieldType = context.fieldMapper(fieldName); + return fieldType != null ? fieldType.name() : fieldName; + } + @Override protected int doHashCode() { return Objects.hash(clauses, slop, inOrder); @@ -273,11 +282,11 @@ public String getWriteableName() { } /** - * SpanGapQueryBuilder enables gaps in a SpanNearQuery. + * SpanGapQueryBuilder enables gaps in a SpanNearQuery. * Since, SpanGapQuery is private to SpanNearQuery, SpanGapQueryBuilder cannot * be used to generate a Query (SpanGapQuery) like another QueryBuilder. - * Instead, it just identifies a span_gap clause so that SpanNearQuery.addGap(int) - * can be invoked for it. + * Instead, it just identifies a span_gap clause so that SpanNearQuery.addGap(int) + * can be invoked for it. * This QueryBuilder is only applicable as a clause in SpanGapQueryBuilder but * yet to enforce this restriction. */ @@ -286,9 +295,9 @@ public static class SpanGapQueryBuilder implements SpanQueryBuilder { /** Name of field to match against. */ private final String fieldName; - + /** Width of the gap introduced. */ - private final int width; + private final int width; /** * Constructs a new SpanGapQueryBuilder term query. @@ -301,7 +310,7 @@ public SpanGapQueryBuilder(String fieldName, int width) { throw new IllegalArgumentException("[span_gap] field name is null or empty"); } //lucene has not coded any restriction on value of width. 
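
For context on why all span_gap clauses must agree on the resolved field: they exist only to drive Lucene's SpanNearQuery.Builder#addGap, which is per-field. A hedged usage sketch against the Lucene spans API (field and terms invented):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.spans.SpanNearQuery;
    import org.apache.lucene.search.spans.SpanTermQuery;

    // Sketch: a span_near over "body" with a one-position gap between the terms.
    // Every clause and gap targets the same field, hence the equality checks above.
    class SpanGapSketch {
        static SpanNearQuery quickGapFox() {
            return new SpanNearQuery.Builder("body", true)
                .addClause(new SpanTermQuery(new Term("body", "quick")))
                .addGap(1)
                .addClause(new SpanTermQuery(new Term("body", "fox")))
                .build();
        }
    }
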
- //to-do : find if theoretically it makes sense to apply restrictions. + //to-do : find if theoretically it makes sense to apply restrictions. this.fieldName = fieldName; this.width = width; } @@ -396,7 +405,7 @@ public static SpanGapQueryBuilder fromXContent(XContentParser parser) throws IOE fieldName = currentFieldName; } else if (token.isValue()) { width = parser.intValue(); - } + } } SpanGapQueryBuilder result = new SpanGapQueryBuilder(fieldName, width); return result; @@ -420,7 +429,7 @@ public final int hashCode() { return Objects.hash(getClass(), fieldName, width); } - + @Override public final String toString() { return Strings.toString(this, true, true); diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java index fbada58f29477..c20df00a1093a 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java @@ -221,7 +221,7 @@ public static TermsSetQueryBuilder fromXContent(XContentParser parser) throws IO } @Override - protected Query doToQuery(QueryShardContext context) throws IOException { + protected Query doToQuery(QueryShardContext context) { if (values.isEmpty()) { return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query."); } @@ -230,6 +230,15 @@ protected Query doToQuery(QueryShardContext context) throws IOException { throw new BooleanQuery.TooManyClauses(); } + List queries = createTermQueries(context); + LongValuesSource longValuesSource = createValuesSource(context); + return new CoveringQuery(queries, longValuesSource); + } + + /** + * Visible only for testing purposes. + */ + List createTermQueries(QueryShardContext context) { final MappedFieldType fieldType = context.fieldMapper(fieldName); final List queries = new ArrayList<>(values.size()); for (Object value : values) { @@ -239,7 +248,11 @@ protected Query doToQuery(QueryShardContext context) throws IOException { queries.add(new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(value)))); } } - final LongValuesSource longValuesSource; + return queries; + } + + private LongValuesSource createValuesSource(QueryShardContext context) { + LongValuesSource longValuesSource; if (minimumShouldMatchField != null) { MappedFieldType msmFieldType = context.fieldMapper(minimumShouldMatchField); if (msmFieldType == null) { @@ -253,13 +266,13 @@ protected Query doToQuery(QueryShardContext context) throws IOException { SearchScript.TERMS_SET_QUERY_CONTEXT); Map params = new HashMap<>(); params.putAll(minimumShouldMatchScript.getParams()); - params.put("num_terms", queries.size()); + params.put("num_terms", values.size()); SearchScript.LeafFactory leafFactory = factory.newFactory(params, context.lookup()); longValuesSource = new ScriptLongValueSource(minimumShouldMatchScript, leafFactory); } else { throw new IllegalStateException("No minimum should match has been specified"); } - return new CoveringQuery(queries, longValuesSource); + return longValuesSource; } static final class ScriptLongValueSource extends LongValuesSource { diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java index b3751afbc9c5d..df96ff87ec256 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java +++ 
b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java @@ -25,6 +25,8 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -88,10 +90,10 @@ public static Map parseFieldsAndWeights(List fields) { * @param mapperService The mapper service where to find the mapping. * @param field The field name to search. */ - public static FieldMapper getFieldMapper(MapperService mapperService, String field) { + public static Mapper getFieldMapper(MapperService mapperService, String field) { DocumentMapper mapper = mapperService.documentMapper(); if (mapper != null) { - FieldMapper fieldMapper = mapper.mappers().getMapper(field); + Mapper fieldMapper = mapper.mappers().getMapper(field); if (fieldMapper != null) { return fieldMapper; } @@ -167,23 +169,27 @@ public static Map resolveMappingField(QueryShardContext context, if (fieldSuffix != null && context.fieldMapper(fieldName + fieldSuffix) != null) { fieldName = fieldName + fieldSuffix; } - FieldMapper mapper = getFieldMapper(context.getMapperService(), fieldName); - if (mapper == null) { - // Unmapped fields are not ignored - fields.put(fieldOrPattern, weight); - continue; - } - if (acceptMetadataField == false && mapper instanceof MetadataFieldMapper) { - // Ignore metadata fields + + MappedFieldType fieldType = context.getMapperService().fullName(fieldName); + if (fieldType == null) { + // Note that we don't ignore unmapped fields. + fields.put(fieldName, weight); continue; } + // Ignore fields that are not in the allowed mapper types. Some // types do not support term queries, and thus we cannot generate // a special query for them. - String mappingType = mapper.fieldType().typeName(); + String mappingType = fieldType.typeName(); if (acceptAllTypes == false && ALLOWED_QUERY_MAPPER_TYPES.contains(mappingType) == false) { continue; } + + // Ignore metadata fields. 
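Aside on the QueryParserHelper rewrite: resolution now goes through MapperService.fullName (which understands field aliases) first, keeps unmapped fields, filters by mapping type, and only then drops metadata fields — the metadata check is the hunk immediately below. A rough JDK-only model of that ordering, with hypothetical maps standing in for the mapper service:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

class MappingFieldResolution {
    // Keeps unmapped fields, drops disallowed mapping types, then drops metadata fields.
    static Map<String, Float> resolve(Map<String, String> fieldTypes, Set<String> metadataFields,
                                      Set<String> allowedTypes, Map<String, Float> requested) {
        Map<String, Float> resolved = new LinkedHashMap<>();
        for (Map.Entry<String, Float> entry : requested.entrySet()) {
            String type = fieldTypes.get(entry.getKey());
            if (type == null) {
                resolved.put(entry.getKey(), entry.getValue()); // unmapped fields are not ignored
            } else if (allowedTypes.contains(type) && !metadataFields.contains(entry.getKey())) {
                resolved.put(entry.getKey(), entry.getValue());
            }
        }
        return resolved;
    }

    public static void main(String[] args) {
        Map<String, Float> requested = new LinkedHashMap<>();
        requested.put("title", 1.0f);
        requested.put("_routing", 1.0f); // metadata field, should be skipped
        requested.put("missing", 1.0f);  // unmapped field, should be kept
        System.out.println(resolve(Map.of("title", "text", "_routing", "keyword"),
                Set.of("_routing"), Set.of("text", "keyword"), requested));
        // prints: {title=1.0, missing=1.0}
    }
}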
+ Mapper mapper = getFieldMapper(context.getMapperService(), fieldName); + if (acceptMetadataField == false && mapper instanceof MetadataFieldMapper) { + continue; + } fields.put(fieldName, weight); } checkForTooManyFields(fields); diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 23d7bce70fb04..d69072e0cb0da 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -415,10 +415,9 @@ public void updateShardState(final ShardRouting newRouting, if (state == IndexShardState.POST_RECOVERY && newRouting.active()) { assert currentRouting.active() == false : "we are in POST_RECOVERY, but our shard routing is active " + currentRouting; - - if (newRouting.primary() && currentRouting.isRelocationTarget() == false) { - replicationTracker.activatePrimaryMode(getLocalCheckpoint()); - } + assert currentRouting.isRelocationTarget() == false || currentRouting.primary() == false || + replicationTracker.isPrimaryMode() : + "a primary relocation is completed by the master, but primary mode is not active " + currentRouting; changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]"); } else if (currentRouting.primary() && currentRouting.relocating() && replicationTracker.isPrimaryMode() == false && @@ -434,7 +433,12 @@ public void updateShardState(final ShardRouting newRouting, final CountDownLatch shardStateUpdated = new CountDownLatch(1); if (newRouting.primary()) { - if (newPrimaryTerm != primaryTerm) { + if (newPrimaryTerm == primaryTerm) { + if (currentRouting.initializing() && currentRouting.isRelocationTarget() == false && newRouting.active()) { + // the master started a recovering primary, activate primary mode. + replicationTracker.activatePrimaryMode(getLocalCheckpoint()); + } + } else { assert currentRouting.primary() == false : "term is only increased as part of primary promotion"; /* Note that due to cluster state batching an initializing primary shard can be failed and re-assigned * in one state, causing its term to be incremented. Note that if both current shard state and new @@ -523,6 +527,11 @@ public void onFailure(Exception e) { } // set this last, once we finished updating all internal state. this.shardRouting = newRouting; + + assert this.shardRouting.primary() == false || + this.shardRouting.started() == false || // note that we use started and not active to avoid relocating shards + this.replicationTracker.isPrimaryMode() + : "a started primary must be in primary mode " + this.shardRouting; shardStateUpdated.countDown(); } if (currentRouting != null && currentRouting.active() == false && newRouting.active()) { @@ -647,22 +656,22 @@ private IndexShardState changeState(IndexShardState newState, String reason) { public Engine.IndexResult applyIndexOperationOnPrimary(long version, VersionType versionType, SourceToParse sourceToParse, long autoGeneratedTimestamp, boolean isRetry) throws IOException { + assert versionType.validateVersionForWrites(version); return applyIndexOperation(SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm, version, versionType, autoGeneratedTimestamp, isRetry, Engine.Operation.Origin.PRIMARY, sourceToParse); } - public Engine.IndexResult applyIndexOperationOnReplica(long seqNo, long version, VersionType versionType, - long autoGeneratedTimeStamp, boolean isRetry, SourceToParse sourceToParse) + public Engine.IndexResult applyIndexOperationOnReplica(long seqNo, long version, long autoGeneratedTimeStamp, + boolean isRetry, SourceToParse sourceToParse) throws IOException { - return applyIndexOperation(seqNo, primaryTerm, version, versionType, autoGeneratedTimeStamp, isRetry, + return applyIndexOperation(seqNo, primaryTerm, version, null, autoGeneratedTimeStamp, isRetry, Engine.Operation.Origin.REPLICA, sourceToParse); } - private Engine.IndexResult applyIndexOperation(long seqNo, long opPrimaryTerm, long version, VersionType versionType, + private Engine.IndexResult applyIndexOperation(long seqNo, long opPrimaryTerm, long version, @Nullable VersionType versionType, long autoGeneratedTimeStamp, boolean isRetry, Engine.Operation.Origin origin, SourceToParse sourceToParse) throws IOException { assert opPrimaryTerm <= this.primaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.primaryTerm + "]"; - assert versionType.validateVersionForWrites(version) : "version [" + version + "], version type [" + versionType + "]"; ensureWriteAllowed(origin); Engine.Index operation; try { @@ -738,19 +747,18 @@ private Engine.NoOpResult noOp(Engine engine, Engine.NoOp noOp) { public Engine.DeleteResult applyDeleteOperationOnPrimary(long version, String type, String id, VersionType versionType) throws IOException { + assert versionType.validateVersionForWrites(version); return applyDeleteOperation(SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm, version, type, id, versionType, Engine.Operation.Origin.PRIMARY); } - public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long version, String type, String id, - VersionType versionType) throws IOException { - return applyDeleteOperation(seqNo, primaryTerm, version, type, id, versionType, Engine.Operation.Origin.REPLICA); + public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long version, String type, String id) throws IOException { + return applyDeleteOperation(seqNo, primaryTerm, version, type, id, null, Engine.Operation.Origin.REPLICA); } private Engine.DeleteResult applyDeleteOperation(long seqNo, long opPrimaryTerm, long version, String type, String id, - VersionType versionType, Engine.Operation.Origin origin) throws IOException { + @Nullable VersionType versionType, Engine.Operation.Origin origin) throws IOException { assert opPrimaryTerm <= this.primaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.primaryTerm + "]"; - assert versionType.validateVersionForWrites(version) : "version [" + version + "], version type [" + versionType + "]"; ensureWriteAllowed(origin); // When there is a single type, the unique identifier is only composed of the _id, // so there is no way to differentiate foo#1 from bar#1. This is especially an issue @@ -1213,14 +1221,14 @@ public Engine.Result applyTranslogOperation(Translog.Operation operation, Engine // we set canHaveDuplicates to true all the time such that we de-optimize the translog case and ensure that all // autoGeneratedID docs that are coming from the primary are updated correctly. result = applyIndexOperation(index.seqNo(), index.primaryTerm(), index.version(), - index.versionType().versionTypeForReplicationAndRecovery(), index.getAutoGeneratedIdTimestamp(), true, origin, + null, index.getAutoGeneratedIdTimestamp(), true, origin, source(shardId.getIndexName(), index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source())).routing(index.routing())); break; case DELETE: final Translog.Delete delete = (Translog.Delete) operation; result = applyDeleteOperation(delete.seqNo(), delete.primaryTerm(), delete.version(), delete.type(), delete.id(), - delete.versionType().versionTypeForReplicationAndRecovery(), origin); + null, origin); break; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) operation; diff --git a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java index 22fc3c4575ea6..6c67cd1027007 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.tasks.Task; @@ -80,49 +81,26 @@ void setChunkSize(ByteSizeValue chunkSize) { // only settable for tests } public void resync(final IndexShard indexShard, final ActionListener listener) { - ActionListener resyncListener = null; + Translog.Snapshot snapshot = null; try { final long startingSeqNo = indexShard.getGlobalCheckpoint() + 1; - // TODO: A follow-up to make resync using soft-deletes - Translog.Snapshot snapshot = indexShard.newTranslogSnapshotFromMinSeqNo(startingSeqNo); final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); - resyncListener = new ActionListener() { - @Override - public void onResponse(final ResyncTask resyncTask) { - try { - snapshot.close(); - listener.onResponse(resyncTask); - } catch (final Exception e) { - onFailure(e); - } - } - - @Override - public void onFailure(final Exception e) { - try { - snapshot.close(); - } catch (final Exception inner) { - e.addSuppressed(inner); - } finally { - listener.onFailure(e); - } - } - }; - ShardId shardId = indexShard.shardId(); - + final ShardId shardId = indexShard.shardId(); // Wrap translog snapshot to make it synchronized as it is accessed by different threads through SnapshotSender.
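Aside: the wrapped snapshot introduced here is a plain synchronized delegate — every method locks the wrapper before touching the underlying, non-thread-safe snapshot. A generic JDK-only sketch of the pattern (SnapshotLike is a made-up stand-in for Translog.Snapshot, not an Elasticsearch type):

import java.io.Closeable;
import java.io.IOException;

interface SnapshotLike<T> extends Closeable {
    int totalOperations();
    T next() throws IOException;
}

// Synchronized delegate: all access to the non-thread-safe snapshot is serialized on the wrapper.
class SynchronizedSnapshot<T> implements SnapshotLike<T> {
    private final SnapshotLike<T> delegate;

    SynchronizedSnapshot(SnapshotLike<T> delegate) {
        this.delegate = delegate;
    }

    @Override
    public synchronized int totalOperations() {
        return delegate.totalOperations();
    }

    @Override
    public synchronized T next() throws IOException {
        return delegate.next(); // calls from different threads can never interleave here
    }

    @Override
    public synchronized void close() throws IOException {
        delegate.close();
    }

    public static void main(String[] args) throws IOException {
        String[] ops = {"index-0", "delete-1"};
        SnapshotLike<String> raw = new SnapshotLike<String>() {
            private int pos = 0;
            @Override public int totalOperations() { return ops.length; }
            @Override public String next() { return pos < ops.length ? ops[pos++] : null; }
            @Override public void close() {}
        };
        try (SnapshotLike<String> safe = new SynchronizedSnapshot<>(raw)) {
            for (String op = safe.next(); op != null; op = safe.next()) {
                System.out.println(op);
            }
        }
    }
}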
// Even though those calls are not concurrent, snapshot.next() uses non-synchronized state and is not multi-thread-compatible // Also fail the resync early if the shard is shutting down - Translog.Snapshot wrappedSnapshot = new Translog.Snapshot() { - + // TODO: A follow-up to make resync using soft-deletes + snapshot = indexShard.newTranslogSnapshotFromMinSeqNo(startingSeqNo); + final Translog.Snapshot originalSnapshot = snapshot; + final Translog.Snapshot wrappedSnapshot = new Translog.Snapshot() { @Override public synchronized void close() throws IOException { - snapshot.close(); + originalSnapshot.close(); } @Override public synchronized int totalOperations() { - return snapshot.totalOperations(); + return originalSnapshot.totalOperations(); } @Override @@ -133,15 +111,40 @@ public synchronized Translog.Operation next() throws IOException { } else { assert state == IndexShardState.STARTED : "resync should only happen on a started shard, but state was: " + state; } - return snapshot.next(); + return originalSnapshot.next(); } }; + final ActionListener resyncListener = new ActionListener() { + @Override + public void onResponse(final ResyncTask resyncTask) { + try { + wrappedSnapshot.close(); + listener.onResponse(resyncTask); + } catch (final Exception e) { + onFailure(e); + } + } + + @Override + public void onFailure(final Exception e) { + try { + wrappedSnapshot.close(); + } catch (final Exception inner) { + e.addSuppressed(inner); + } finally { + listener.onFailure(e); + } + } + }; + resync(shardId, indexShard.routingEntry().allocationId().getId(), indexShard.getPrimaryTerm(), wrappedSnapshot, startingSeqNo, maxSeqNo, resyncListener); } catch (Exception e) { - if (resyncListener != null) { - resyncListener.onFailure(e); - } else { + try { + IOUtils.close(snapshot); + } catch (IOException inner) { + e.addSuppressed(inner); + } finally { listener.onFailure(e); } } diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index c13c56beb5a20..bc77626b94277 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -48,7 +48,7 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.search.dfs.AggregatedDfs; @@ -162,8 +162,7 @@ private static void handleFieldWildcards(IndexShard indexShard, TermVectorsReque private static boolean isValidField(MappedFieldType fieldType) { // must be a string - if (fieldType instanceof KeywordFieldMapper.KeywordFieldType == false - && fieldType instanceof TextFieldMapper.TextFieldType == false) { + if (fieldType instanceof StringFieldType == false) { return false; } // and must be indexed diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index 39a077aa7822a..36a4b7cf0b170 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -1011,7 +1011,8 @@ public static class Index implements Operation { public static final int FORMAT_6_0 
= 8; // since 6.0.0 public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0 - public static final int SERIALIZATION_FORMAT = FORMAT_NO_PARENT; + public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1; + public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE; private final String id; private final long autoGeneratedIdTimestamp; @@ -1019,7 +1020,6 @@ public static class Index implements Operation { private final long seqNo; private final long primaryTerm; private final long version; - private final VersionType versionType; private final BytesReference source; private final String routing; @@ -1034,8 +1034,9 @@ private Index(final StreamInput in) throws IOException { in.readOptionalString(); // _parent } this.version = in.readLong(); - this.versionType = VersionType.fromValue(in.readByte()); - assert versionType.validateVersionForWrites(this.version) : "invalid version for writes: " + this.version; + if (format < FORMAT_NO_VERSION_TYPE) { + in.readByte(); // _version_type + } this.autoGeneratedIdTimestamp = in.readLong(); seqNo = in.readLong(); primaryTerm = in.readLong(); @@ -1049,15 +1050,14 @@ public Index(Engine.Index index, Engine.IndexResult indexResult) { this.seqNo = indexResult.getSeqNo(); this.primaryTerm = index.primaryTerm(); this.version = indexResult.getVersion(); - this.versionType = index.versionType(); this.autoGeneratedIdTimestamp = index.getAutoGeneratedIdTimestamp(); } public Index(String type, String id, long seqNo, long primaryTerm, byte[] source) { - this(type, id, seqNo, primaryTerm, Versions.MATCH_ANY, VersionType.INTERNAL, source, null, -1); + this(type, id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1); } - public Index(String type, String id, long seqNo, long primaryTerm, long version, VersionType versionType, + public Index(String type, String id, long seqNo, long primaryTerm, long version, byte[] source, String routing, long autoGeneratedIdTimestamp) { this.type = type; this.id = id; @@ -1065,7 +1065,6 @@ public Index(String type, String id, long seqNo, long primaryTerm, long version, this.seqNo = seqNo; this.primaryTerm = primaryTerm; this.version = version; - this.versionType = versionType; this.routing = routing; this.autoGeneratedIdTimestamp = autoGeneratedIdTimestamp; } @@ -1110,24 +1109,22 @@ public long version() { return this.version; } - public VersionType versionType() { - return versionType; - } - @Override public Source getSource() { return source == null ? null : new Source(source, routing); } private void write(final StreamOutput out) throws IOException { - out.writeVInt(SERIALIZATION_FORMAT); + final int format = out.getVersion().onOrAfter(Version.V_7_0_0_alpha1) ? 
SERIALIZATION_FORMAT : FORMAT_6_0; + out.writeVInt(format); out.writeString(id); out.writeString(type); out.writeBytesReference(source); out.writeOptionalString(routing); out.writeLong(version); - - out.writeByte(versionType.getValue()); + if (format < FORMAT_NO_VERSION_TYPE) { + out.writeByte(VersionType.EXTERNAL.getValue()); + } out.writeLong(autoGeneratedIdTimestamp); out.writeLong(seqNo); out.writeLong(primaryTerm); @@ -1149,7 +1146,6 @@ public boolean equals(Object o) { primaryTerm != index.primaryTerm || id.equals(index.id) == false || type.equals(index.type) == false || - versionType != index.versionType || autoGeneratedIdTimestamp != index.autoGeneratedIdTimestamp || source.equals(index.source) == false) { return false; @@ -1168,7 +1164,6 @@ public int hashCode() { result = 31 * result + Long.hashCode(seqNo); result = 31 * result + Long.hashCode(primaryTerm); result = 31 * result + Long.hashCode(version); - result = 31 * result + versionType.hashCode(); result = 31 * result + source.hashCode(); result = 31 * result + (routing != null ? routing.hashCode() : 0); result = 31 * result + Long.hashCode(autoGeneratedIdTimestamp); @@ -1183,7 +1178,6 @@ public String toString() { ", seqNo=" + seqNo + ", primaryTerm=" + primaryTerm + ", version=" + version + - ", versionType=" + versionType + ", autoGeneratedIdTimestamp=" + autoGeneratedIdTimestamp + '}'; } @@ -1197,14 +1191,15 @@ public long getAutoGeneratedIdTimestamp() { public static class Delete implements Operation { private static final int FORMAT_6_0 = 4; // 6.0 - * - public static final int SERIALIZATION_FORMAT = FORMAT_6_0; + public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0 + public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1; + public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE; private final String type, id; private final Term uid; private final long seqNo; private final long primaryTerm; private final long version; - private final VersionType versionType; private Delete(final StreamInput in) throws IOException { final int format = in.readVInt();// SERIALIZATION_FORMAT @@ -1213,29 +1208,29 @@ private Delete(final StreamInput in) throws IOException { id = in.readString(); uid = new Term(in.readString(), in.readBytesRef()); this.version = in.readLong(); - this.versionType = VersionType.fromValue(in.readByte()); - assert versionType.validateVersionForWrites(this.version); + if (format < FORMAT_NO_VERSION_TYPE) { + in.readByte(); // versionType + } seqNo = in.readLong(); primaryTerm = in.readLong(); } public Delete(Engine.Delete delete, Engine.DeleteResult deleteResult) { - this(delete.type(), delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion(), delete.versionType()); + this(delete.type(), delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion()); } /** utility for testing */ public Delete(String type, String id, long seqNo, long primaryTerm, Term uid) { - this(type, id, uid, seqNo, primaryTerm, Versions.MATCH_ANY, VersionType.INTERNAL); + this(type, id, uid, seqNo, primaryTerm, Versions.MATCH_ANY); } - public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, VersionType versionType) { + public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version) { this.type = Objects.requireNonNull(type); this.id = Objects.requireNonNull(id); this.uid = uid; this.seqNo = seqNo; this.primaryTerm = primaryTerm; this.version = version; - 
this.versionType = versionType; } @Override @@ -1274,23 +1269,22 @@ public long version() { return this.version; } - public VersionType versionType() { - return this.versionType; - } - @Override public Source getSource() { throw new IllegalStateException("trying to read doc source from delete operation"); } private void write(final StreamOutput out) throws IOException { - out.writeVInt(SERIALIZATION_FORMAT); + final int format = out.getVersion().onOrAfter(Version.V_7_0_0_alpha1) ? SERIALIZATION_FORMAT : FORMAT_6_0; + out.writeVInt(format); out.writeString(type); out.writeString(id); out.writeString(uid.field()); out.writeBytesRef(uid.bytes()); out.writeLong(version); - out.writeByte(versionType.getValue()); + if (format < FORMAT_NO_VERSION_TYPE) { + out.writeByte(VersionType.EXTERNAL.getValue()); + } out.writeLong(seqNo); out.writeLong(primaryTerm); } @@ -1309,8 +1303,7 @@ public boolean equals(Object o) { return version == delete.version && seqNo == delete.seqNo && primaryTerm == delete.primaryTerm && - uid.equals(delete.uid) && - versionType == delete.versionType; + uid.equals(delete.uid); } @Override @@ -1319,7 +1312,6 @@ public int hashCode() { result = 31 * result + Long.hashCode(seqNo); result = 31 * result + Long.hashCode(primaryTerm); result = 31 * result + Long.hashCode(version); - result = 31 * result + versionType.hashCode(); return result; } @@ -1330,7 +1322,6 @@ public String toString() { ", seqNo=" + seqNo + ", primaryTerm=" + primaryTerm + ", version=" + version + - ", versionType=" + versionType + '}'; } } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 8709bb41fb3ba..1ef34552b12e6 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -207,8 +207,8 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc // We don't store versionType in Lucene index, we need to exclude it from this check final boolean sameOp; if (newOp instanceof Translog.Index && prvOp instanceof Translog.Index) { - final Translog.Index o1 = (Translog.Index) newOp; - final Translog.Index o2 = (Translog.Index) prvOp; + final Translog.Index o1 = (Translog.Index) prvOp; + final Translog.Index o2 = (Translog.Index) newOp; sameOp = Objects.equals(o1.id(), o2.id()) && Objects.equals(o1.type(), o2.type()) && Objects.equals(o1.source(), o2.source()) && Objects.equals(o1.routing(), o2.routing()) && o1.primaryTerm() == o2.primaryTerm() && o1.seqNo() == o2.seqNo() diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java index 2d41491e3a746..a1038853c0670 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.FieldAliasMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; @@ -129,7 +130,9 @@ private Map getMappers(List mapperPlugi mappers.put(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser()); 
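Aside, looking back at the Translog.Index and Translog.Delete hunks above: the writer now picks the serialization format from the destination stream's version and, when forced to emit the old format, still writes a placeholder version-type byte so pre-7.0 readers can parse the entry. A toy JDK-only model of that gating; the format numbers and placeholder value are illustrative, not the real wire constants:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

class VersionGatedTranslogEntry {
    static final int FORMAT_OLD = 8;             // old format: still carries a version-type byte
    static final int FORMAT_NO_VERSION_TYPE = 9; // new format: version type dropped

    static byte[] write(boolean targetIsNew, long version) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        int format = targetIsNew ? FORMAT_NO_VERSION_TYPE : FORMAT_OLD;
        out.writeInt(format);
        out.writeLong(version);
        if (format < FORMAT_NO_VERSION_TYPE) {
            out.writeByte(1); // placeholder version-type byte for old-format readers
        }
        return bytes.toByteArray();
    }

    static long read(byte[] data) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
        int format = in.readInt();
        long version = in.readLong();
        if (format < FORMAT_NO_VERSION_TYPE) {
            in.readByte(); // skip the byte that old-format writers still emit
        }
        return version;
    }

    public static void main(String[] args) throws IOException {
        // Old and new formats round-trip to the same logical entry.
        System.out.println(read(write(true, 3L)) == read(write(false, 3L))); // prints: true
    }
}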
mappers.put(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser()); mappers.put(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser()); + mappers.put(FieldAliasMapper.CONTENT_TYPE, new FieldAliasMapper.TypeParser()); mappers.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser()); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index bf4bd9c57cef0..a3da1dafe48d7 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -31,9 +31,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript; -import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; + /** * Manages building {@link ScriptService}. @@ -64,11 +62,6 @@ public class ScriptModule { ).collect(Collectors.toMap(c -> c.name, Function.identity())); } - public static final boolean EXCEPTION_FOR_MISSING_VALUE = - Booleans.parseBoolean(System.getProperty("es.scripting.exception_for_missing_value", "false")); - - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ScriptModule.class)); - private final ScriptService scriptService; public ScriptModule(Settings settings, List scriptPlugins) { @@ -92,10 +85,6 @@ public ScriptModule(Settings settings, List scriptPlugins) { } } } - if (EXCEPTION_FOR_MISSING_VALUE == false) - DEPRECATION_LOGGER.deprecated("Script: returning default values for missing document values is deprecated. 
" + - "Set system property '-Des.scripting.exception_for_missing_value=true' " + - "to make behaviour compatible with future major versions."); scriptService = new ScriptService(settings, Collections.unmodifiableMap(engines), Collections.unmodifiableMap(contexts)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index abdc195b514a1..df1bd115e2bfc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -85,6 +85,12 @@ public SignificantTermsAggregatorFactory(String name, AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metaData); + + if (!config.unmapped()) { + this.fieldType = config.fieldContext().fieldType(); + this.indexedFieldName = fieldType.name(); + } + this.includeExclude = includeExclude; this.executionHint = executionHint; this.filter = filterBuilder == null @@ -98,15 +104,6 @@ public SignificantTermsAggregatorFactory(String name, : searcher.count(filter); this.bucketCountThresholds = bucketCountThresholds; this.significanceHeuristic = significanceHeuristic; - setFieldInfo(context); - - } - - private void setFieldInfo(SearchContext context) { - if (!config.unmapped()) { - this.indexedFieldName = config.fieldContext().field(); - fieldType = context.smartNameFieldType(indexedFieldName); - } } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregationBuilder.java index 5e8bc2f4c1888..f0b85f979c233 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregationBuilder.java @@ -343,13 +343,10 @@ protected void doWriteTo(StreamOutput out) throws IOException { protected AggregatorFactory doBuild(SearchContext context, AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { SignificanceHeuristic executionHeuristic = this.significanceHeuristic.rewrite(context); - String[] execFieldNames = sourceFieldNames; - if (execFieldNames == null) { - execFieldNames = new String[] { fieldName }; - } + return new SignificantTextAggregatorFactory(name, includeExclude, filterBuilder, bucketCountThresholds, executionHeuristic, context, parent, subFactoriesBuilder, - fieldName, execFieldNames, filterDuplicateText, metaData); + fieldName, sourceFieldNames, filterDuplicateText, metaData); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java index c35b0bfd2d095..ea9a8a91aea9e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java @@ -71,12 +71,19 @@ public 
SignificantTextAggregatorFactory(String name, IncludeExclude includeExclu AggregatorFactories.Builder subFactoriesBuilder, String fieldName, String [] sourceFieldNames, boolean filterDuplicateText, Map metaData) throws IOException { super(name, context, parent, subFactoriesBuilder, metaData); + + // Note that if the field is unmapped (its field type is null), we don't fail, + // and just use the given field name as a placeholder. + this.fieldType = context.getQueryShardContext().fieldMapper(fieldName); + this.indexedFieldName = fieldType != null ? fieldType.name() : fieldName; + this.sourceFieldNames = sourceFieldNames == null + ? new String[] { indexedFieldName } + : sourceFieldNames; + this.includeExclude = includeExclude; this.filter = filterBuilder == null ? null : filterBuilder.toQuery(context.getQueryShardContext()); - this.indexedFieldName = fieldName; - this.sourceFieldNames = sourceFieldNames; this.filterDuplicateText = filterDuplicateText; IndexSearcher searcher = context.searcher(); // Important - need to use the doc count that includes deleted docs @@ -86,11 +93,8 @@ public SignificantTextAggregatorFactory(String name, IncludeExclude includeExclu : searcher.count(filter); this.bucketCountThresholds = bucketCountThresholds; this.significanceHeuristic = significanceHeuristic; - fieldType = context.getQueryShardContext().fieldMapper(indexedFieldName); - } - /** * Get the number of docs in the superset. */ @@ -133,13 +137,13 @@ private long getBackgroundFrequency(String value) throws IOException { } return context.searcher().count(query); } - + public long getBackgroundFrequency(BytesRef termBytes) throws IOException { String value = format.format(termBytes).toString(); return getBackgroundFrequency(value); - } + } + - @Override public void close() { try { @@ -154,11 +158,11 @@ public void close() { @Override protected Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) - throws IOException { + throws IOException { if (collectsFromSingleBucket == false) { return asMultiBucketAggregator(this, context, parent); } - + numberOfAggregatorsCreated++; BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds); if (bucketCountThresholds.getShardSize() == SignificantTextAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { @@ -166,7 +170,7 @@ protected Aggregator createInternal(Aggregator parent, boolean collectsFromSingl // Use default heuristic to avoid any wrong-ranking caused by // distributed counting but request double the usual amount. // We typically need more than the number of "top" terms requested - // by other aggregations as the significance algorithm is in less + // by other aggregations as the significance algorithm is in less // of a position to down-select at shard-level - some of the things // we want to find have only one occurrence on each shard and as // such are impossible to differentiate from non-significant terms @@ -177,9 +181,9 @@ protected Aggregator createInternal(Aggregator parent, boolean collectsFromSingl // TODO - need to check with mapping that this is indeed a text field.... - IncludeExclude.StringFilter incExcFilter = includeExclude == null ? null: + IncludeExclude.StringFilter incExcFilter = includeExclude == null ? 
null: includeExclude.convertToStringFilter(DocValueFormat.RAW); - + return new SignificantTextAggregator(name, factories, context, parent, pipelineAggregators, bucketCountThresholds, incExcFilter, significanceHeuristic, this, indexedFieldName, sourceFieldNames, filterDuplicateText, metaData); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 08251c6a73b94..64ed5f4479514 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -55,7 +54,7 @@ import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; -import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -84,8 +83,7 @@ public void preProcess(SearchContext context) { @Override public void execute(SearchContext context) { final FieldsVisitor fieldsVisitor; - Set fieldNames = null; - List fieldNamePatterns = null; + Map> storedToRequestedFields = new HashMap<>(); StoredFieldsContext storedFieldsContext = context.storedFieldsContext(); if (storedFieldsContext == null) { @@ -98,39 +96,36 @@ public void execute(SearchContext context) { // disable stored fields entirely fieldsVisitor = null; } else { - for (String fieldName : context.storedFieldsContext().fieldNames()) { - if (fieldName.equals(SourceFieldMapper.NAME)) { + for (String fieldNameOrPattern : context.storedFieldsContext().fieldNames()) { + if (fieldNameOrPattern.equals(SourceFieldMapper.NAME)) { FetchSourceContext fetchSourceContext = context.hasFetchSourceContext() ? context.fetchSourceContext() - : FetchSourceContext.FETCH_SOURCE; + : FetchSourceContext.FETCH_SOURCE; context.fetchSourceContext(new FetchSourceContext(true, fetchSourceContext.includes(), fetchSourceContext.excludes())); continue; } - if (Regex.isSimpleMatchPattern(fieldName)) { - if (fieldNamePatterns == null) { - fieldNamePatterns = new ArrayList<>(); - } - fieldNamePatterns.add(fieldName); - } else { + + Collection fieldNames = context.mapperService().simpleMatchToFullName(fieldNameOrPattern); + for (String fieldName : fieldNames) { MappedFieldType fieldType = context.smartNameFieldType(fieldName); if (fieldType == null) { // Only fail if we know it is an object field, missing paths / fields shouldn't fail.
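Aside: the core data structure of this FetchPhase rewrite is the storedToRequestedFields multimap built just above — each concrete stored field maps to every requested name (aliases included) that resolved to it, so a single stored value can be echoed back under several response keys. A compact JDK-only sketch; the resolution map here is hypothetical:

import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

class StoredFieldRouting {
    public static void main(String[] args) {
        // Requested name -> concrete stored field, as the mapper service would resolve it.
        Map<String, String> resolved = new LinkedHashMap<>();
        resolved.put("route_length", "route_length_miles"); // alias
        resolved.put("route_length_miles", "route_length_miles");

        // Build the multimap: concrete stored field -> all requested names that point at it.
        Map<String, Set<String>> storedToRequested = new HashMap<>();
        for (Map.Entry<String, String> entry : resolved.entrySet()) {
            storedToRequested.computeIfAbsent(entry.getValue(), k -> new HashSet<>())
                    .add(entry.getKey());
        }

        // One stored value fans out to every requested key in the response.
        List<Integer> storedValues = List.of(42);
        Map<String, List<Integer>> response = new LinkedHashMap<>();
        for (String requested : storedToRequested.getOrDefault("route_length_miles", Set.of())) {
            response.put(requested, storedValues);
        }
        System.out.println(response.keySet()); // [route_length, route_length_miles], order may vary
    }
}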
if (context.getObjectMapper(fieldName) != null) { throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field"); } + } else { + String storedField = fieldType.name(); + Set requestedFields = storedToRequestedFields.computeIfAbsent( + storedField, key -> new HashSet<>()); + requestedFields.add(fieldName); } - if (fieldNames == null) { - fieldNames = new HashSet<>(); - } - fieldNames.add(fieldName); } } boolean loadSource = context.sourceRequested(); - if (fieldNames == null && fieldNamePatterns == null) { + if (storedToRequestedFields.isEmpty()) { // empty list specified, default to disable _source if no explicit indication fieldsVisitor = new FieldsVisitor(loadSource); } else { - fieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames, - fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, loadSource); + fieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), loadSource); } } @@ -149,10 +144,11 @@ public void execute(SearchContext context) { final SearchHit searchHit; int rootDocId = findRootDocumentIfNested(context, subReaderContext, subDocId); if (rootDocId != -1) { - searchHit = createNestedSearchHit(context, docId, subDocId, rootDocId, fieldNames, fieldNamePatterns, - subReaderContext); + searchHit = createNestedSearchHit(context, docId, subDocId, rootDocId, + storedToRequestedFields, subReaderContext); } else { - searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId, subReaderContext); + searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId, + storedToRequestedFields, subReaderContext); } hits[index] = searchHit; @@ -190,21 +186,18 @@ private int findRootDocumentIfNested(SearchContext context, LeafReaderContext su return -1; } - private SearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, + private SearchHit createSearchHit(SearchContext context, + FieldsVisitor fieldsVisitor, + int docId, + int subDocId, + Map> storedToRequestedFields, LeafReaderContext subReaderContext) { if (fieldsVisitor == null) { return new SearchHit(docId); } - loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); - fieldsVisitor.postProcess(context.mapperService()); - Map searchFields = null; - if (!fieldsVisitor.fields().isEmpty()) { - searchFields = new HashMap<>(fieldsVisitor.fields().size()); - for (Map.Entry> entry : fieldsVisitor.fields().entrySet()) { - searchFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue())); - } - } + Map searchFields = getSearchFields(context, fieldsVisitor, subDocId, + storedToRequestedFields, subReaderContext); DocumentMapper documentMapper = context.mapperService().documentMapper(fieldsVisitor.uid().type()); Text typeText; @@ -223,9 +216,40 @@ private SearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVis return searchHit; } - private SearchHit createNestedSearchHit(SearchContext context, int nestedTopDocId, int nestedSubDocId, - int rootSubDocId, Set fieldNames, - List fieldNamePatterns, LeafReaderContext subReaderContext) throws IOException { + private Map getSearchFields(SearchContext context, + FieldsVisitor fieldsVisitor, + int subDocId, + Map> storedToRequestedFields, + LeafReaderContext subReaderContext) { + loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); + fieldsVisitor.postProcess(context.mapperService()); + + if (fieldsVisitor.fields().isEmpty()) { + return null; + } + + Map searchFields = new 
HashMap<>(fieldsVisitor.fields().size()); + for (Map.Entry> entry : fieldsVisitor.fields().entrySet()) { + String storedField = entry.getKey(); + List storedValues = entry.getValue(); + + if (storedToRequestedFields.containsKey(storedField)) { + for (String requestedField : storedToRequestedFields.get(storedField)) { + searchFields.put(requestedField, new DocumentField(requestedField, storedValues)); + } + } else { + searchFields.put(storedField, new DocumentField(storedField, storedValues)); + } + } + return searchFields; + } + + private SearchHit createNestedSearchHit(SearchContext context, + int nestedTopDocId, + int nestedSubDocId, + int rootSubDocId, + Map> storedToRequestedFields, + LeafReaderContext subReaderContext) throws IOException { // Also if highlighting is requested on nested documents we need to fetch the _source from the root document, // otherwise highlighting will attempt to fetch the _source from the nested doc, which will fail, // because the entire _source is only stored with the root document. @@ -244,9 +268,13 @@ private SearchHit createNestedSearchHit(SearchContext context, int nestedTopDocI source = null; } + Map searchFields = null; + if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) { + FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), false); + searchFields = getSearchFields(context, nestedFieldsVisitor, nestedSubDocId, + storedToRequestedFields, subReaderContext); + } - Map searchFields = - getSearchFields(context, nestedSubDocId, fieldNames, fieldNamePatterns, subReaderContext); DocumentMapper documentMapper = context.mapperService().documentMapper(uid.type()); SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(subReaderContext, nestedSubDocId); @@ -307,26 +335,6 @@ private SearchHit createNestedSearchHit(SearchContext context, int nestedTopDocI return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, searchFields); } - private Map getSearchFields(SearchContext context, int nestedSubDocId, Set fieldNames, - List fieldNamePatterns, LeafReaderContext subReaderContext) { - Map searchFields = null; - if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) { - FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames, - fieldNamePatterns == null ? 
Collections.emptyList() : fieldNamePatterns, false); - if (nestedFieldsVisitor != null) { - loadStoredFields(context, subReaderContext, nestedFieldsVisitor, nestedSubDocId); - nestedFieldsVisitor.postProcess(context.mapperService()); - if (!nestedFieldsVisitor.fields().isEmpty()) { - searchFields = new HashMap<>(nestedFieldsVisitor.fields().size()); - for (Map.Entry> entry : nestedFieldsVisitor.fields().entrySet()) { - searchFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue())); - } - } - } - } - return searchFields; - } - private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId, LeafReaderContext subReaderContext, MapperService mapperService, diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index e5ff7abc68b34..11e46061d6786 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -100,7 +100,7 @@ public void hitExecute(SearchContext context, HitContext hitContext) { if (highlightQuery == null) { highlightQuery = context.parsedQuery().query(); } - HighlighterContext highlighterContext = new HighlighterContext(fieldName, + HighlighterContext highlighterContext = new HighlighterContext(fieldType.name(), field, fieldType, context, hitContext, highlightQuery); if ((highlighter.canHighlight(fieldType) == false) && fieldNameContainsWildcards) { @@ -109,7 +109,11 @@ public void hitExecute(SearchContext context, HitContext hitContext) { } HighlightField highlightField = highlighter.highlight(highlighterContext); if (highlightField != null) { - highlightFields.put(highlightField.name(), highlightField); + // Note that we make sure to use the original field name in the response. This is because the + // original field could be an alias, and highlighter implementations may instead reference the + // concrete field it points to. 
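Aside: the highlighting fix that follows is the mirror image of the fetch change — highlighting runs against the concrete field an alias points to, but the response entry is rebuilt under the name the user actually requested. A minimal sketch of that rename-on-output step (HighlightResult is a made-up holder class, not Elasticsearch's HighlightField):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

class HighlightRename {
    static final class HighlightResult {
        final String name;
        final List<String> fragments;

        HighlightResult(String name, List<String> fragments) {
            this.name = name;
            this.fragments = fragments;
        }
    }

    public static void main(String[] args) {
        String requestedField = "route_alias"; // the (alias) name the user asked to highlight
        // The highlighter produced its result against the concrete field.
        HighlightResult internal = new HighlightResult("route_length_miles", List.of("<em>42</em> miles"));

        // Re-key the result under the requested name before returning it to the caller.
        Map<String, HighlightResult> highlightFields = new HashMap<>();
        highlightFields.put(requestedField, new HighlightResult(requestedField, internal.fragments));
        System.out.println(highlightFields.keySet()); // prints: [route_alias]
    }
}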
+ highlightFields.put(fieldName, + new HighlightField(fieldName, highlightField.fragments())); } } } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java index 2e04443f9e526..9ec20001adc8a 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java @@ -148,7 +148,7 @@ private FieldLookup loadFieldData(String name) { reader.document(docId, fieldVisitor); fieldVisitor.postProcess(mapperService); List storedFields = fieldVisitor.fields().get(data.fieldType().name()); - data.fields(singletonMap(name, storedFields)); + data.fields(singletonMap(fieldName, storedFields)); } catch (IOException e) { throw new ElasticsearchParseException("failed to load field [{}]", e, name); } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index dcdc669539f53..9199615868a13 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -321,7 +320,7 @@ protected void populateCommonFields(MapperService mapperService, SuggestionSearc suggestionContext.setAnalyzer(luceneAnalyzer); } - suggestionContext.setField(field); + suggestionContext.setField(fieldType.name()); if (size != null) { suggestionContext.setSize(size); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index c4f7d8a500064..48aaf705099da 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -29,8 +29,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; @@ -138,8 +138,8 @@ protected XContentBuilder toInnerXContent(XContentBuilder builder, Params params @Override public Set parseContext(ParseContext parseContext, XContentParser parser) throws IOException, ElasticsearchParseException { if (fieldName != null) { - FieldMapper mapper = parseContext.docMapper().mappers().getMapper(fieldName); - if (!(mapper instanceof GeoPointFieldMapper)) { + MappedFieldType fieldType = parseContext.mapperService().fullName(fieldName); + if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { throw new ElasticsearchParseException("referenced field must be mapped to geo_point"); } } diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index be44d790b4004..86c2b67be9c54 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -76,8 +76,12 @@ public void testValidation() { " concrete index: [[foo/bar]] and indices: [myindex];"); } + /** + * Test that {@link PutMappingRequest#buildFromSimplifiedDef(String, Object...)} + * rejects inputs where the {@code Object...} varargs of field name and properties are not + * paired correctly + */ public void testBuildFromSimplifiedDef() { - // test that method rejects input where input varargs fieldname/properites are not paired correctly IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PutMappingRequest.buildFromSimplifiedDef("type", "only_field")); assertEquals("mapping source must be pairs of fieldnames and properties definition.", e.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java index 8fcc76e018a6c..1fd912e72a426 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java @@ -20,13 +20,20 @@ package org.elasticsearch.action.bulk; +import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.Map; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; +import static org.hamcrest.Matchers.equalTo; public class BulkIntegrationIT extends ESIntegTestCase { public void testBulkIndexCreatesMapping() throws Exception { @@ -40,4 +47,38 @@ public void testBulkIndexCreatesMapping() throws Exception { assertTrue(mappingsResponse.getMappings().get("logstash-2014.03.30").containsKey("logs")); }); } + + /** + * This tests that the {@link TransportBulkAction} evaluates alias routing values correctly when dealing with + * an alias pointing to multiple indices, while a write index exists.
+ */ + public void testBulkWithWriteIndexAndRouting() { + Map twoShardsSettings = Collections.singletonMap(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2); + client().admin().indices().prepareCreate("index1") + .addAlias(new Alias("alias1").indexRouting("0")).setSettings(twoShardsSettings).get(); + client().admin().indices().prepareCreate("index2") + .addAlias(new Alias("alias1").indexRouting("0").writeIndex(randomFrom(false, null))) + .setSettings(twoShardsSettings).get(); + client().admin().indices().prepareCreate("index3") + .addAlias(new Alias("alias1").indexRouting("1").writeIndex(true)).setSettings(twoShardsSettings).get(); + + IndexRequest indexRequestWithAlias = new IndexRequest("alias1", "type", "id"); + if (randomBoolean()) { + indexRequestWithAlias.routing("1"); + } + indexRequestWithAlias.source(Collections.singletonMap("foo", "baz")); + BulkResponse bulkResponse = client().prepareBulk().add(indexRequestWithAlias).get(); + assertThat(bulkResponse.getItems()[0].getResponse().getIndex(), equalTo("index3")); + assertThat(bulkResponse.getItems()[0].getResponse().getShardId().getId(), equalTo(0)); + assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(1L)); + assertThat(bulkResponse.getItems()[0].getResponse().status(), equalTo(RestStatus.CREATED)); + assertThat(client().prepareGet("index3", "type", "id").setRouting("1").get().getSource().get("foo"), equalTo("baz")); + + bulkResponse = client().prepareBulk().add(client().prepareUpdate("alias1", "type", "id").setDoc("foo", "updated")).get(); + assertFalse(bulkResponse.hasFailures()); + assertThat(client().prepareGet("index3", "type", "id").setRouting("1").get().getSource().get("foo"), equalTo("updated")); + bulkResponse = client().prepareBulk().add(client().prepareDelete("alias1", "type", "id")).get(); + assertFalse(bulkResponse.hasFailures()); + assertFalse(client().prepareGet("index3", "type", "id").setRouting("1").get().isExists()); + } } diff --git a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java index 914c2b87422db..15b8e1c99d266 100644 --- a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java @@ -21,9 +21,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.index.Index; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESTestCase; @@ -38,7 +36,7 @@ public class ResyncReplicationRequestTests extends ESTestCase { public void testSerialization() throws IOException { final byte[] bytes = "{}".getBytes(Charset.forName("UTF-8")); final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(), - Versions.MATCH_ANY, VersionType.INTERNAL, bytes, null, -1); + randomNonNegativeLong(), bytes, null, -1); final ShardId shardId = new ShardId(new Index("index", "uuid"), 0); final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, new Translog.Operation[]{index}); diff --git a/server/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java b/server/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java index d72b4c5f1ec16..e8c152abdc216 100644 --- 
a/server/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/server/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -57,6 +58,7 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.client.Requests.createIndexRequest; +import static org.elasticsearch.client.Requests.deleteRequest; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_METADATA_BLOCK; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_READ_ONLY_BLOCK; @@ -85,6 +87,17 @@ public void testAliases() throws Exception { ensureGreen(); + logger.info("--> aliasing index [test] with [alias1]"); + assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1", false)); + + logger.info("--> indexing against [alias1], should fail now"); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> client().index(indexRequest("alias1").type("type1").id("1").source(source("2", "test"), + XContentType.JSON)).actionGet()); + assertThat(exception.getMessage(), equalTo("no write index is defined for alias [alias1]." + + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index")); + logger.info("--> aliasing index [test] with [alias1]"); assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1")); @@ -98,6 +111,44 @@ public void testAliases() throws Exception { ensureGreen(); + logger.info("--> add index [test_x] with [alias1]"); + assertAcked(admin().indices().prepareAliases().addAlias("test_x", "alias1")); + + logger.info("--> indexing against [alias1], should fail now"); + exception = expectThrows(IllegalArgumentException.class, + () -> client().index(indexRequest("alias1").type("type1").id("1").source(source("2", "test"), + XContentType.JSON)).actionGet()); + assertThat(exception.getMessage(), equalTo("no write index is defined for alias [alias1]." + + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index")); + + logger.info("--> deleting against [alias1], should fail now"); + exception = expectThrows(IllegalArgumentException.class, + () -> client().delete(deleteRequest("alias1").type("type1").id("1")).actionGet()); + assertThat(exception.getMessage(), equalTo("no write index is defined for alias [alias1]." 
+ + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index")); + + logger.info("--> remove aliasing index [test_x] with [alias1]"); + assertAcked(admin().indices().prepareAliases().removeAlias("test_x", "alias1")); + + logger.info("--> indexing against [alias1], should work now"); + indexResponse = client().index(indexRequest("alias1").type("type1").id("1") + .source(source("1", "test"), XContentType.JSON)).actionGet(); + assertThat(indexResponse.getIndex(), equalTo("test")); + + logger.info("--> add index [test_x] with [alias1] as write-index"); + assertAcked(admin().indices().prepareAliases().addAlias("test_x", "alias1", true)); + + logger.info("--> indexing against [alias1], should work now"); + indexResponse = client().index(indexRequest("alias1").type("type1").id("1") + .source(source("1", "test"), XContentType.JSON)).actionGet(); + assertThat(indexResponse.getIndex(), equalTo("test_x")); + + logger.info("--> deleting against [alias1], should fail now"); + DeleteResponse deleteResponse = client().delete(deleteRequest("alias1").type("type1").id("1")).actionGet(); + assertThat(deleteResponse.getIndex(), equalTo("test_x")); + logger.info("--> remove [alias1], Aliasing index [test_x] with [alias1]"); assertAcked(admin().indices().prepareAliases().removeAlias("test", "alias1").addAlias("test_x", "alias1")); diff --git a/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java b/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java index 064702170d5bb..e5df229cd98a9 100644 --- a/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java +++ b/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java @@ -19,10 +19,19 @@ package org.elasticsearch.client.documentation; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESIntegTestCase; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.instanceOf; + /** * This class is used to generate the Java indices administration documentation. 
* You need to wrap your code between two tags like: @@ -48,16 +57,14 @@ public void testPutMappingDocumentation() throws Exception { Client client = client(); // tag::index-with-mapping - client.admin().indices().prepareCreate("twitter") // <1> - .addMapping("\"tweet\": {\n" + // <2> - " \"properties\": {\n" + - " \"message\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - "}") + client.admin().indices().prepareCreate("twitter") // <1> + .addMapping("tweet", "message", "type=text") // <2> .get(); // end::index-with-mapping + GetMappingsResponse getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); + assertEquals(1, getMappingsResponse.getMappings().size()); + ImmutableOpenMap<String, MappingMetaData> indexMapping = getMappingsResponse.getMappings().get("twitter"); + assertThat(indexMapping.get("tweet"), instanceOf(MappingMetaData.class)); // we need to delete in order to create a fresh new index with another type client.admin().indices().prepareDelete("twitter").get(); @@ -89,6 +96,11 @@ public void testPutMappingDocumentation() throws Exception { "}", XContentType.JSON) .get(); // end::putMapping-request-source + getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); + assertEquals(1, getMappingsResponse.getMappings().size()); + indexMapping = getMappingsResponse.getMappings().get("twitter"); + assertEquals(singletonMap("properties", singletonMap("name", singletonMap("type", "text"))), + indexMapping.get("user").getSourceAsMap()); // tag::putMapping-request-source-append client.admin().indices().preparePutMapping("twitter") // <1> @@ -102,6 +114,13 @@ public void testPutMappingDocumentation() throws Exception { "}", XContentType.JSON) .get(); // end::putMapping-request-source-append + getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); + assertEquals(1, getMappingsResponse.getMappings().size()); + indexMapping = getMappingsResponse.getMappings().get("twitter"); + Map<String, Map<String, String>> expected = new HashMap<>(); + expected.put("name", singletonMap("type", "text")); + expected.put("user_name", singletonMap("type", "text")); + assertEquals(expected, indexMapping.get("user").getSourceAsMap().get("properties")); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 0530bd617af63..9ad9603b1489b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -20,14 +20,20 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.Version; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData.State; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import
org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.InvalidIndexNameException; @@ -37,6 +43,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.function.Function; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.arrayContaining; @@ -44,6 +51,7 @@ import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyArray; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -996,6 +1004,152 @@ public void testIndexAliases() { assertArrayEquals(new String[] {"test-alias-0", "test-alias-1", "test-alias-non-filtering"}, strings); } + public void testConcreteWriteIndexSuccessful() { + boolean testZeroWriteIndex = randomBoolean(); + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(testZeroWriteIndex ? true : null))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + String[] strings = indexNameExpressionResolver + .indexAliases(state, "test-0", x -> true, true, "test-*"); + Arrays.sort(strings); + assertArrayEquals(new String[] {"test-alias"}, strings); + IndicesRequest request = new IndicesRequest() { + + @Override + public String[] indices() { + return new String[] { "test-alias" }; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + } + }; + Index writeIndex = indexNameExpressionResolver.concreteWriteIndex(state, request); + assertThat(writeIndex.getName(), equalTo("test-0")); + + state = ClusterState.builder(state).metaData(MetaData.builder(state.metaData()) + .put(indexBuilder("test-1").putAlias(AliasMetaData.builder("test-alias") + .writeIndex(testZeroWriteIndex ? randomFrom(false, null) : true)))).build(); + writeIndex = indexNameExpressionResolver.concreteWriteIndex(state, request); + assertThat(writeIndex.getName(), equalTo(testZeroWriteIndex ? 
"test-0" : "test-1")); + } + + public void testConcreteWriteIndexWithInvalidIndicesRequest() { + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias"))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + Function requestGen = (indices) -> new IndicesRequest() { + + @Override + public String[] indices() { + return indices; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + } + }; + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteWriteIndex(state, requestGen.apply(null))); + assertThat(exception.getMessage(), equalTo("indices request must specify a single index expression")); + exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteWriteIndex(state, requestGen.apply(new String[] {"too", "many"}))); + assertThat(exception.getMessage(), equalTo("indices request must specify a single index expression")); + + + } + + public void testConcreteWriteIndexWithWildcardExpansion() { + boolean testZeroWriteIndex = randomBoolean(); + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-1").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(testZeroWriteIndex ? true : null))) + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(testZeroWriteIndex ? randomFrom(false, null) : true))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + String[] strings = indexNameExpressionResolver + .indexAliases(state, "test-0", x -> true, true, "test-*"); + Arrays.sort(strings); + assertArrayEquals(new String[] {"test-alias"}, strings); + IndicesRequest request = new IndicesRequest() { + + @Override + public String[] indices() { + return new String[] { "test-*"}; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictExpandOpenAndForbidClosed(); + } + }; + + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteWriteIndex(state, request)); + assertThat(exception.getMessage(), + equalTo("The index expression [test-*] and options provided did not point to a single write-index")); + } + + public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(false))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + String[] strings = indexNameExpressionResolver + .indexAliases(state, "test-0", x -> true, true, "test-*"); + Arrays.sort(strings); + assertArrayEquals(new String[] {"test-alias"}, strings); + DocWriteRequest request = randomFrom(new IndexRequest("test-alias"), + new UpdateRequest("test-alias", "_type", "_id"), new DeleteRequest("test-alias")); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteWriteIndex(state, request)); + assertThat(exception.getMessage(), equalTo("no write index is defined for alias [test-alias]." 
+ + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index")); + } + + public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(randomFrom(false, null)))) + .put(indexBuilder("test-1").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(randomFrom(false, null)))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + String[] strings = indexNameExpressionResolver + .indexAliases(state, "test-0", x -> true, true, "test-*"); + Arrays.sort(strings); + assertArrayEquals(new String[] {"test-alias"}, strings); + DocWriteRequest request = randomFrom(new IndexRequest("test-alias"), + new UpdateRequest("test-alias", "_type", "_id"), new DeleteRequest("test-alias")); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteWriteIndex(state, request)); + assertThat(exception.getMessage(), equalTo("no write index is defined for alias [test-alias]." + + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index")); + } + + public void testAliasResolutionNotAllowingMultipleIndices() { + boolean test0WriteIndex = randomBoolean(); + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(randomFrom(test0WriteIndex, null)))) + .put(indexBuilder("test-1").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(randomFrom(!test0WriteIndex, null)))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + String[] strings = indexNameExpressionResolver + .indexAliases(state, "test-0", x -> true, true, "test-*"); + Arrays.sort(strings); + assertArrayEquals(new String[] {"test-alias"}, strings); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteIndexNames(state, IndicesOptions.strictSingleIndexNoExpandForbidClosed(), + "test-alias")); + assertThat(exception.getMessage(), endsWith(", can't execute a single index op")); + } + public void testDeleteIndexIgnoresAliases() { MetaData.Builder mdBuilder = MetaData.builder() .put(indexBuilder("test-index").state(State.OPEN) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 32dd4324ff835..38e3fcc6ea7c5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -172,6 +172,87 @@ public void testResolveIndexRouting() { } catch (IllegalArgumentException ex) { assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); } + + IndexMetaData.Builder builder2 = IndexMetaData.builder("index2") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias0").build()); + MetaData 
metaDataTwoIndices = MetaData.builder(metaData).put(builder2).build(); + + // alias with multiple indices + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> metaDataTwoIndices.resolveIndexRouting("1", "alias0")); + assertThat(exception.getMessage(), startsWith("Alias [alias0] has more than one index associated with it")); + } + + public void testResolveWriteIndexRouting() { + AliasMetaData.Builder aliasZeroBuilder = AliasMetaData.builder("alias0"); + if (randomBoolean()) { + aliasZeroBuilder.writeIndex(true); + } + IndexMetaData.Builder builder = IndexMetaData.builder("index") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(aliasZeroBuilder.build()) + .putAlias(AliasMetaData.builder("alias1").routing("1").build()) + .putAlias(AliasMetaData.builder("alias2").routing("1,2").build()) + .putAlias(AliasMetaData.builder("alias3").writeIndex(false).build()) + .putAlias(AliasMetaData.builder("alias4").routing("1,2").writeIndex(true).build()); + MetaData metaData = MetaData.builder().put(builder).build(); + + // no alias, no index + assertEquals(metaData.resolveWriteIndexRouting(null, null), null); + assertEquals(metaData.resolveWriteIndexRouting("0", null), "0"); + + // index, no alias + assertEquals(metaData.resolveWriteIndexRouting(null, "index"), null); + assertEquals(metaData.resolveWriteIndexRouting("0", "index"), "0"); + + // alias with no index routing + assertEquals(metaData.resolveWriteIndexRouting(null, "alias0"), null); + assertEquals(metaData.resolveWriteIndexRouting("0", "alias0"), "0"); + + // alias with index routing. + assertEquals(metaData.resolveWriteIndexRouting(null, "alias1"), "1"); + Exception exception = expectThrows(IllegalArgumentException.class, () -> metaData.resolveWriteIndexRouting("0", "alias1")); + assertThat(exception.getMessage(), + is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation")); + + // alias with invalid index routing. 
+ exception = expectThrows(IllegalArgumentException.class, () -> metaData.resolveWriteIndexRouting(null, "alias2")); + assertThat(exception.getMessage(), + is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + exception = expectThrows(IllegalArgumentException.class, () -> metaData.resolveWriteIndexRouting("1", "alias2")); + assertThat(exception.getMessage(), + is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + exception = expectThrows(IllegalArgumentException.class, () -> metaData.resolveWriteIndexRouting(randomFrom("1", null), "alias4")); + assertThat(exception.getMessage(), + is("index/alias [alias4] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + + // alias with no write index + exception = expectThrows(IllegalArgumentException.class, () -> metaData.resolveWriteIndexRouting("1", "alias3")); + assertThat(exception.getMessage(), + is("alias [alias3] does not have a write index")); + + + // aliases with multiple indices + AliasMetaData.Builder aliasZeroBuilderTwo = AliasMetaData.builder("alias0"); + if (randomBoolean()) { + aliasZeroBuilderTwo.writeIndex(false); + } + IndexMetaData.Builder builder2 = IndexMetaData.builder("index2") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(aliasZeroBuilderTwo.build()) + .putAlias(AliasMetaData.builder("alias1").routing("0").writeIndex(true).build()) + .putAlias(AliasMetaData.builder("alias2").writeIndex(true).build()); + MetaData metaDataTwoIndices = MetaData.builder(metaData).put(builder2).build(); + + // verify that new write index is used + assertThat("0", equalTo(metaDataTwoIndices.resolveWriteIndexRouting("0", "alias1"))); } public void testUnknownFieldClusterMetaData() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java index 849841943ecc6..fe7b02d63ecce 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java @@ -290,6 +290,7 @@ public void testIllegalSettingName() throws Exception { } public void testBackcompatV1() throws Exception { + assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm()); Path configDir = env.configFile(); SimpleFSDirectory directory = new SimpleFSDirectory(configDir); try (IndexOutput output = directory.createOutput("elasticsearch.keystore", IOContext.DEFAULT)) { @@ -320,6 +321,7 @@ } public void testBackcompatV2() throws Exception { + assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm()); Path configDir = env.configFile(); SimpleFSDirectory directory = new SimpleFSDirectory(configDir); byte[] fileBytes = new byte[20]; diff --git a/server/src/test/java/org/elasticsearch/get/GetActionIT.java b/server/src/test/java/org/elasticsearch/get/GetActionIT.java index 30f86241cbd6d..5ed6b957c78a4 100644 --- a/server/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/server/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetRequestBuilder; import
org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -39,6 +40,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -51,6 +53,7 @@ import static java.util.Collections.singleton; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; @@ -70,7 +73,7 @@ public void testSimpleGet() { assertAcked(prepareCreate("test") .addMapping("type1", "field1", "type=keyword,store=true", "field2", "type=keyword,store=true") .setSettings(Settings.builder().put("index.refresh_interval", -1)) - .addAlias(new Alias("alias"))); + .addAlias(new Alias("alias").writeIndex(randomFrom(true, false, null)))); ensureGreen(); GetResponse response = client().prepareGet(indexOrAlias(), "type1", "1").get(); @@ -192,12 +195,31 @@ public void testSimpleGet() { assertThat(response.isExists(), equalTo(false)); } + public void testGetWithAliasPointingToMultipleIndices() { + client().admin().indices().prepareCreate("index1") + .addAlias(new Alias("alias1").indexRouting("0")).get(); + if (randomBoolean()) { + client().admin().indices().prepareCreate("index2") + .addAlias(new Alias("alias1").indexRouting("0").writeIndex(randomFrom(false, null))).get(); + } else { + client().admin().indices().prepareCreate("index3") + .addAlias(new Alias("alias1").indexRouting("1").writeIndex(true)).get(); + } + IndexResponse indexResponse = client().prepareIndex("index1", "type", "id") + .setSource(Collections.singletonMap("foo", "bar")).get(); + assertThat(indexResponse.status().getStatus(), equalTo(RestStatus.CREATED.getStatus())); + + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> + client().prepareGet("alias1", "type", "_alias_id").get()); + assertThat(exception.getMessage(), endsWith("can't execute a single index op")); + } + private static String indexOrAlias() { return randomBoolean() ? 
"test" : "alias"; } public void testSimpleMultiGet() throws Exception { - assertAcked(prepareCreate("test").addAlias(new Alias("alias")) + assertAcked(prepareCreate("test").addAlias(new Alias("alias").writeIndex(randomFrom(true, false, null))) .addMapping("type1", "field", "type=keyword,store=true") .setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen(); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 8c4879fd35e82..9aba48f7de55b 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -21,12 +21,11 @@ import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -84,14 +83,14 @@ public void testThatAnalyzersAreUsedInMapping() throws IOException { NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider(analyzerName, AnalyzerScope.INDEX, randomPreBuiltAnalyzer.getAnalyzer(randomVersion)).get(); - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", analyzerName).endObject().endObject() - .endObject().endObject()); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + .endObject().endObject(); + MapperService mapperService = createIndex("test", indexSettings, "type", mapping).mapperService(); - FieldMapper fieldMapper = docMapper.mappers().getMapper("field"); - assertThat(fieldMapper.fieldType().searchAnalyzer(), instanceOf(NamedAnalyzer.class)); - NamedAnalyzer fieldMapperNamedAnalyzer = fieldMapper.fieldType().searchAnalyzer(); + MappedFieldType fieldType = mapperService.fullName("field"); + assertThat(fieldType.searchAnalyzer(), instanceOf(NamedAnalyzer.class)); + NamedAnalyzer fieldMapperNamedAnalyzer = fieldType.searchAnalyzer(); assertThat(fieldMapperNamedAnalyzer.analyzer(), is(namedAnalyzer.analyzer())); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 21f1781cc65a0..f7742b64c1942 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1196,7 +1196,7 @@ public void testVersioningNewCreate() throws IOException { assertThat(indexResult.getVersion(), equalTo(1L)); create = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), 
create.primaryTerm(), indexResult.getVersion(), - create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + null, REPLICA, 0, -1, false); indexResult = replicaEngine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); } @@ -1210,7 +1210,7 @@ public void testReplicatedVersioningWithFlush() throws IOException { create = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), create.primaryTerm(), indexResult.getVersion(), - create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + null, REPLICA, 0, -1, false); indexResult = replicaEngine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); assertTrue(indexResult.isCreated()); @@ -1229,7 +1229,7 @@ public void testReplicatedVersioningWithFlush() throws IOException { update = new Engine.Index(newUid(doc), doc, updateResult.getSeqNo(), update.primaryTerm(), updateResult.getVersion(), - update.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + null, REPLICA, 0, -1, false); updateResult = replicaEngine.index(update); assertThat(updateResult.getVersion(), equalTo(2L)); assertFalse(updateResult.isCreated()); @@ -1282,7 +1282,7 @@ public void testVersioningNewIndex() throws IOException { Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); - index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), null, REPLICA, 0, -1, false); indexResult = replicaEngine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); } @@ -3253,7 +3253,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); - index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); indexResult = replicaEngine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -3267,7 +3267,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep assertEquals(1, topDocs.totalHits); } - index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); indexResult = replicaEngine.index(index); assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.SUCCESS)); replicaEngine.refresh("test"); @@ -3287,7 +3287,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() Engine.IndexResult result = engine.index(firstIndexRequest); assertThat(result.getVersion(), equalTo(1L)); - Engine.Index firstIndexRequestReplica = new Engine.Index(newUid(doc), doc, 
result.getSeqNo(), firstIndexRequest.primaryTerm(), result.getVersion(), firstIndexRequest.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + Engine.Index firstIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), firstIndexRequest.primaryTerm(), result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); Engine.IndexResult indexReplicaResult = replicaEngine.index(firstIndexRequestReplica); assertThat(indexReplicaResult.getVersion(), equalTo(1L)); @@ -3301,7 +3301,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() assertEquals(1, topDocs.totalHits); } - Engine.Index secondIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), secondIndexRequest.primaryTerm(), result.getVersion(), firstIndexRequest.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + Engine.Index secondIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), secondIndexRequest.primaryTerm(), result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); replicaEngine.index(secondIndexRequestReplica); replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { @@ -3324,7 +3324,7 @@ public Engine.Index appendOnlyPrimary(ParsedDocument doc, boolean retry, final l } public Engine.Index appendOnlyReplica(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp, final long seqNo) { - return new Engine.Index(newUid(doc), doc, seqNo, 2, 1, VersionType.EXTERNAL, + return new Engine.Index(newUid(doc), doc, seqNo, 2, 1, null, Engine.Operation.Origin.REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, retry); } @@ -3728,7 +3728,7 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio sequenceNumberSupplier.getAsLong(), 1, i, - VersionType.EXTERNAL, + origin == PRIMARY ? VersionType.EXTERNAL : null, origin, System.nanoTime(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -3742,7 +3742,7 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio sequenceNumberSupplier.getAsLong(), 1, i, - VersionType.EXTERNAL, + origin == PRIMARY ? 
VersionType.EXTERNAL : null, origin, System.nanoTime()); operations.add(delete); @@ -4030,7 +4030,7 @@ public void markSeqNoAsCompleted(long seqNo) { final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null); final Term uid = newUid(doc); final long time = System.nanoTime(); - actualEngine.index(new Engine.Index(uid, doc, seqNo, 1, 1, VersionType.EXTERNAL, REPLICA, time, time, false)); + actualEngine.index(new Engine.Index(uid, doc, seqNo, 1, 1, null, REPLICA, time, time, false)); if (rarely()) { actualEngine.rollTranslogGeneration(); } @@ -4788,7 +4788,7 @@ public void testTrimUnsafeCommits() throws Exception { for (int i = 0; i < seqNos.size(); i++) { ParsedDocument doc = testParsedDocument(Long.toString(seqNos.get(i)), null, testDocument(), new BytesArray("{}"), null); Engine.Index index = new Engine.Index(newUid(doc), doc, seqNos.get(i), 0, - 1, VersionType.EXTERNAL, REPLICA, System.nanoTime(), -1, false); + 1, null, REPLICA, System.nanoTime(), -1, false); engine.index(index); if (randomBoolean()) { engine.flush(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesMissingV6BehaviourTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesMissingV6BehaviourTests.java deleted file mode 100644 index 1dc836874d847..0000000000000 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesMissingV6BehaviourTests.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.fielddata; - -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.fielddata.ScriptDocValues.Longs; -import org.elasticsearch.index.fielddata.ScriptDocValues.Dates; -import org.elasticsearch.index.fielddata.ScriptDocValues.Booleans; -import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.MockScriptEngine; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptEngine; -import org.elasticsearch.script.ScriptModule; -import org.elasticsearch.test.ESTestCase; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.ReadableDateTime; -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; - -import static java.util.Collections.singletonList; - -public class ScriptDocValuesMissingV6BehaviourTests extends ESTestCase { - - public void testScriptMissingValuesWarning(){ - new ScriptModule(Settings.EMPTY, singletonList(new ScriptPlugin() { - @Override - public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap("1", script -> "1")); - } - })); - assertWarnings("Script: returning default values for missing document values is deprecated. " + - "Set system property '-Des.scripting.exception_for_missing_value=true' " + - "to make behaviour compatible with future major versions."); - } - - public void testZeroForMissingValueLong() throws IOException { - long[][] values = new long[between(3, 10)][]; - for (int d = 0; d < values.length; d++) { - values[d] = new long[0]; - } - Longs longs = wrap(values); - for (int round = 0; round < 10; round++) { - int d = between(0, values.length - 1); - longs.setNextDocId(d); - assertEquals(0, longs.getValue()); - } - } - - public void testEpochForMissingValueDate() throws IOException { - final ReadableDateTime EPOCH = new DateTime(0, DateTimeZone.UTC); - long[][] values = new long[between(3, 10)][]; - for (int d = 0; d < values.length; d++) { - values[d] = new long[0]; - } - Dates dates = wrapDates(values); - for (int round = 0; round < 10; round++) { - int d = between(0, values.length - 1); - dates.setNextDocId(d); - assertEquals(EPOCH, dates.getValue()); - } - } - - public void testFalseForMissingValueBoolean() throws IOException { - long[][] values = new long[between(3, 10)][]; - for (int d = 0; d < values.length; d++) { - values[d] = new long[0]; - } - Booleans bools = wrapBooleans(values); - for (int round = 0; round < 10; round++) { - int d = between(0, values.length - 1); - bools.setNextDocId(d); - assertEquals(false, bools.getValue()); - } - } - - public void testNullForMissingValueGeo() throws IOException{ - final MultiGeoPointValues values = wrap(new GeoPoint[0]); - final ScriptDocValues.GeoPoints script = new ScriptDocValues.GeoPoints(values); - script.setNextDocId(0); - assertEquals(null, script.getValue()); - } - - - private Longs wrap(long[][] values) { - return new Longs(new AbstractSortedNumericDocValues() { - long[] current; - int i; - @Override - public boolean advanceExact(int doc) { - i = 0; - current = values[doc]; - return current.length > 0; - } - @Override - public int docValueCount() { - return current.length; - } - @Override - public long nextValue() { - return current[i++]; - } - }); - } - - private Booleans wrapBooleans(long[][] values) { - return new Booleans(new AbstractSortedNumericDocValues() { - long[] 
current; - int i; - @Override - public boolean advanceExact(int doc) { - i = 0; - current = values[doc]; - return current.length > 0; - } - @Override - public int docValueCount() { - return current.length; - } - @Override - public long nextValue() { - return current[i++]; - } - }); - } - - private Dates wrapDates(long[][] values) { - return new Dates(new AbstractSortedNumericDocValues() { - long[] current; - int i; - @Override - public boolean advanceExact(int doc) { - current = values[doc]; - i = 0; - return current.length > 0; - } - @Override - public int docValueCount() { - return current.length; - } - @Override - public long nextValue() { - return current[i++]; - } - }); - } - - - private static MultiGeoPointValues wrap(final GeoPoint... points) { - return new MultiGeoPointValues() { - int docID = -1; - int i; - @Override - public GeoPoint nextValue() { - if (docID != 0) { - fail(); - } - return points[i++]; - } - @Override - public boolean advanceExact(int docId) { - docID = docId; - return points.length > 0; - } - @Override - public int docValueCount() { - if (docID != 0) { - return 0; - } - return points.length; - } - }; - } - -} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java index 6e9cb6c0b5980..5f4cd98600b46 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java @@ -27,6 +27,8 @@ import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.plugins.Plugin; @@ -49,32 +51,32 @@ protected Collection> getPlugins() { } public void testDefaultMapping() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "binary") .endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping).mapperService(); + MappedFieldType fieldType = mapperService.fullName("field"); - FieldMapper fieldMapper = mapper.mappers().getMapper("field"); - assertThat(fieldMapper, instanceOf(BinaryFieldMapper.class)); - assertThat(fieldMapper.fieldType().stored(), equalTo(false)); + assertThat(fieldType, instanceOf(BinaryFieldMapper.BinaryFieldType.class)); + assertThat(fieldType.stored(), equalTo(false)); } public void testStoredValue() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "binary") .field("store", true) .endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper mapper = 
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping).mapperService(); // case 1: a simple binary value final byte[] binaryValue1 = new byte[100]; @@ -89,13 +91,14 @@ public void testStoredValue() throws IOException { assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2))); for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) { - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "id", + ParsedDocument doc = mapperService.documentMapper().parse(SourceToParse.source("test", "type", "id", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), XContentType.JSON)); BytesRef indexedValue = doc.rootDoc().getBinaryValue("field"); assertEquals(new BytesRef(value), indexedValue); - FieldMapper fieldMapper = mapper.mappers().getMapper("field"); - Object originalValue = fieldMapper.fieldType().valueForDisplay(indexedValue); + + MappedFieldType fieldType = mapperService.fullName("field"); + Object originalValue = fieldType.valueForDisplay(indexedValue); assertEquals(new BytesArray(value), originalValue); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index 44ecb24b67214..8638e16e29af2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -52,7 +52,6 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { private IndexService indexService; private DocumentMapperParser parser; - private DocumentMapperParser preEs6Parser; @Before public void setup() { @@ -101,7 +100,7 @@ public void testSerialization() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper mapper = defaultMapper.mappers().getMapper("field"); + Mapper mapper = defaultMapper.mappers().getMapper("field"); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); mapper.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 1381b6e920559..a01ddccc9398b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -61,10 +62,9 @@ public void testDefaultConfiguration() throws IOException { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class)); - 
- MappedFieldType completionFieldType = fieldMapper.fieldType(); + MappedFieldType completionFieldType = ((CompletionFieldMapper) fieldMapper).fieldType(); NamedAnalyzer indexAnalyzer = completionFieldType.indexAnalyzer(); assertThat(indexAnalyzer.name(), equalTo("simple")); @@ -94,10 +94,9 @@ public void testCompletionAnalyzerSettings() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class)); - - MappedFieldType completionFieldType = fieldMapper.fieldType(); + MappedFieldType completionFieldType = ((CompletionFieldMapper) fieldMapper).fieldType(); NamedAnalyzer indexAnalyzer = completionFieldType.indexAnalyzer(); assertThat(indexAnalyzer.name(), equalTo("simple")); @@ -129,12 +128,11 @@ public void testTypeParsing() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class)); - CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; XContentBuilder builder = jsonBuilder().startObject(); - completionFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); + fieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); builder.close(); Map<String, Object> serializedMap = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)).map(); Map<String, Object> configMap = (Map<String, Object>) serializedMap.get("completion"); @@ -153,15 +151,15 @@ public void testParsingMinimal() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("completion", "suggestion") .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 1); } @@ -192,15 +190,15 @@ public void testParsingMultiValued() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .array("completion", "suggestion1", "suggestion2") .endObject()), XContentType.JSON)); - IndexableField[] fields =
parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 2); } @@ -212,8 +210,8 @@ public void testParsingWithWeight() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() @@ -223,7 +221,7 @@ public void testParsingWithWeight() throws Exception { .endObject() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 1); } @@ -235,8 +233,8 @@ public void testParsingMultiValueWithWeight() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() @@ -246,10 +244,50 @@ public void testParsingMultiValueWithWeight() throws Exception { .endObject() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 3); } + public void testParsingWithGeoFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type1") + .startObject("properties") + .startObject("completion") + .field("type", "completion") + .startObject("contexts") + .field("name", "location") + .field("type", "geo") + .field("path", "alias") + .endObject() + .endObject() + .startObject("birth-place") + .field("type", "geo_point") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "birth-place") + .endObject() + .endObject() + .endObject() + .endObject(); + + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + Mapper fieldMapper = mapperService.documentMapper().mappers().getMapper("completion"); + + ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("completion") + .field("input", "suggestion") + .startObject("contexts") + .field("location", "37.77,-122.42") + .endObject() + .endObject() + .endObject()), XContentType.JSON)); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); + assertSuggestFields(fields, 1); + } + public void testParsingFull() throws Exception { String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") 
.startObject("properties").startObject("completion") @@ -258,8 +296,8 @@ public void testParsingFull() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() @@ -279,7 +317,7 @@ public void testParsingFull() throws Exception { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 3); } @@ -291,8 +329,8 @@ public void testParsingMixed() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() @@ -312,7 +350,7 @@ public void testParsingMixed() throws Exception { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 6); } @@ -420,7 +458,7 @@ public void testPrefixQueryType() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; Query prefixQuery = completionFieldMapper.fieldType().prefixQuery(new BytesRef("co")); assertThat(prefixQuery, instanceOf(PrefixCompletionQuery.class)); @@ -434,7 +472,7 @@ public void testFuzzyQueryType() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; Query prefixQuery = completionFieldMapper.fieldType().fuzzyQuery("co", Fuzziness.fromEdits(FuzzyCompletionQuery.DEFAULT_MAX_EDITS), FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX, @@ -451,7 +489,7 @@ public void testRegexQueryType() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = 
defaultMapper.mappers().getMapper("completion"); CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; Query prefixQuery = completionFieldMapper.fieldType() .regexpQuery(new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_MAX_DETERMINIZED_STATES); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java index eff6222e6c6ff..5eb102208eb30 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java @@ -72,7 +72,7 @@ public void testCopyToFieldsParsing() throws Exception { IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type1").setSource(mapping, XContentType.JSON).get(); DocumentMapper docMapper = index.mapperService().documentMapper("type1"); - FieldMapper fieldMapper = docMapper.mappers().getMapper("copy_test"); + Mapper fieldMapper = docMapper.mappers().getMapper("copy_test"); // Check json serialization TextFieldMapper stringFieldMapper = (TextFieldMapper) fieldMapper; @@ -123,7 +123,7 @@ public void testCopyToFieldsParsing() throws Exception { docMapper = index.mapperService().documentMapper("type1"); fieldMapper = docMapper.mappers().getMapper("new_field"); - assertThat(fieldMapper.fieldType().typeName(), equalTo("long")); + assertThat(fieldMapper.typeName(), equalTo("long")); } public void testCopyToFieldsInnerObjectParsing() throws Exception { @@ -308,13 +308,15 @@ public void testCopyToFieldMerge() throws Exception { MapperService mapperService = createIndex("test").mapperService(); DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), MapperService.MergeReason.MAPPING_UPDATE); + FieldMapper fieldMapperBefore = (FieldMapper) docMapperBefore.mappers().getMapper("copy_test"); - assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields()); + assertEquals(Arrays.asList("foo", "bar"), fieldMapperBefore.copyTo().copyToFields()); DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter), MapperService.MergeReason.MAPPING_UPDATE); + FieldMapper fieldMapperAfter = (FieldMapper) docMapperAfter.mappers().getMapper("copy_test"); - assertEquals(Arrays.asList("baz", "bar"), docMapperAfter.mappers().getMapper("copy_test").copyTo().copyToFields()); - assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields()); + assertEquals(Arrays.asList("baz", "bar"), fieldMapperAfter.copyTo().copyToFields()); + assertEquals(Arrays.asList("foo", "bar"), fieldMapperBefore.copyTo().copyToFields()); } public void testCopyToNestedField() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index c19965ac5f77a..51b270940998a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -382,11 +382,11 @@ public void testMergeDate() throws IOException { .startObject("properties") .startObject("release_date").field("type", "date").field("format", "yyyy/MM/dd").endObject() .endObject().endObject().endObject()); - DocumentMapper initMapper = indexService.mapperService().merge("movie", 
new CompressedXContent(initMapping), + indexService.mapperService().merge("movie", new CompressedXContent(initMapping), MapperService.MergeReason.MAPPING_UPDATE); - assertThat(initMapper.mappers().getMapper("release_date"), notNullValue()); - assertFalse(initMapper.mappers().getMapper("release_date").fieldType().stored()); + assertThat(indexService.mapperService().fullName("release_date"), notNullValue()); + assertFalse(indexService.mapperService().fullName("release_date").stored()); String updateFormatMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("movie") .startObject("properties") diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java index 4e79a68c50e5c..4373f2210a7c7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java @@ -39,6 +39,7 @@ import java.io.IOException; import java.io.StringReader; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class DocumentFieldMapperTests extends LuceneTestCase { @@ -138,7 +139,12 @@ public void testAnalyzers() throws IOException { Analyzer defaultSearch = new FakeAnalyzer("default_search"); Analyzer defaultSearchQuote = new FakeAnalyzer("default_search_quote"); - DocumentFieldMappers documentFieldMappers = new DocumentFieldMappers(Arrays.asList(fieldMapper1, fieldMapper2), defaultIndex, defaultSearch, defaultSearchQuote); + DocumentFieldMappers documentFieldMappers = new DocumentFieldMappers( + Arrays.asList(fieldMapper1, fieldMapper2), + Collections.emptyList(), + defaultIndex, + defaultSearch, + defaultSearchQuote); assertAnalyzes(documentFieldMappers.indexAnalyzer(), "field1", "index"); assertAnalyzes(documentFieldMappers.searchAnalyzer(), "field1", "search"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java index 0234fcb681d82..54b6b2310da57 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java @@ -23,9 +23,10 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; @@ -104,38 +105,50 @@ public void testMergeObjectAndNested() throws Exception { } public void testMergeSearchAnalyzer() throws Exception { - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject() - .endObject().endObject()); - String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - 
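
The DocumentFieldMappers change just above is the visible edge of the new alias support: the constructor now takes a second collection for FieldAliasMapper instances alongside the concrete FieldMapper list. A condensed sketch of the new shape, reusing the names from DocumentFieldMapperTests above (a mapping without aliases simply passes an empty list):

    // Concrete mappers and alias mappers are now supplied separately.
    DocumentFieldMappers documentFieldMappers = new DocumentFieldMappers(
        Arrays.asList(fieldMapper1, fieldMapper2), // concrete field mappers
        Collections.emptyList(),                   // field alias mappers (none here)
        defaultIndex,
        defaultSearch,
        defaultSearchQuote);
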
.startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject() - .endObject().endObject()); + XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "standard") + .field("search_analyzer", "whitespace") + .endObject().endObject() + .endObject().endObject(); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping1).mapperService(); - DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1)); - DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2)); + assertThat(mapperService.fullName("field").searchAnalyzer().name(), equalTo("whitespace")); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); - DocumentMapper merged = existing.merge(changed.mapping()); - - assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword")); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "standard") + .field("search_analyzer", "keyword") + .endObject().endObject() + .endObject().endObject()); + + mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapperService.fullName("field").searchAnalyzer().name(), equalTo("keyword")); } public void testChangeSearchAnalyzerToDefault() throws Exception { - MapperService mapperService = createIndex("test").mapperService(); - String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject() - .endObject().endObject()); - String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").endObject().endObject() - .endObject().endObject()); + XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "standard") + .field("search_analyzer", "whitespace") + .endObject().endObject() + .endObject().endObject(); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping1).mapperService(); - DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE); - DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapperService.fullName("field").searchAnalyzer().name(), equalTo("whitespace")); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); - - assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard")); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") 
+ .field("analyzer", "standard") + .endObject().endObject() + .endObject().endObject()); + + mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapperService.fullName("field").searchAnalyzer().name(), equalTo("standard")); } public void testConcurrentMergeTest() throws Throwable { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 57446825e9d31..8cb654db92afc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -1005,7 +1005,7 @@ public void testSimpleMapper() throws Exception { BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); } @@ -1018,8 +1018,8 @@ public void testParseToJsonAndParse() throws Exception { DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = builtDocMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); - assertThat(doc.getBinaryValue(docMapper.idFieldMapper().fieldType().name()), equalTo(Uid.encodeId("1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); + assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); } public void testSimpleParser() throws Exception { @@ -1030,8 +1030,8 @@ public void testSimpleParser() throws Exception { BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); - assertThat(doc.getBinaryValue(docMapper.idFieldMapper().fieldType().name()), equalTo(Uid.encodeId("1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); + assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); } public void testSimpleParserNoTypeNoId() throws Exception { @@ -1039,8 +1039,8 @@ public void testSimpleParserNoTypeNoId() throws Exception { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json")); Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); - assertThat(doc.getBinaryValue(docMapper.idFieldMapper().fieldType().name()), equalTo(Uid.encodeId("1"))); - 
assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); + assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); } public void testAttributes() throws Exception { @@ -1393,4 +1393,98 @@ public void testBlankFieldNames() throws Exception { client().prepareIndex("idx", "type").setSource(bytes2, XContentType.JSON).get()); assertThat(ExceptionsHelper.detailedMessage(err), containsString("field name cannot be an empty string")); } + + public void testWriteToFieldAlias() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .endObject() + .startObject("concrete-field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); + + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject() + .field("alias-field", "value") + .endObject()); + MapperParsingException exception = expectThrows(MapperParsingException.class, + () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + + assertEquals("Cannot write to a field alias [alias-field].", exception.getCause().getMessage()); + } + + public void testCopyToFieldAlias() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .endObject() + .startObject("concrete-field") + .field("type", "keyword") + .endObject() + .startObject("text-field") + .field("type", "text") + .field("copy_to", "alias-field") + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); + + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject() + .field("text-field", "value") + .endObject()); + MapperParsingException exception = expectThrows(MapperParsingException.class, + () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + + assertEquals("Cannot copy to a field alias [alias-field].", exception.getCause().getMessage()); + } + + public void testDynamicDottedFieldNameWithFieldAlias() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .endObject() + .startObject("concrete-field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); + + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("alias-field.dynamic-field") + 
.field("type", "keyword") + .endObject() + .endObject()); + MapperParsingException exception = expectThrows(MapperParsingException.class, + () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + + assertEquals("Could not dynamically add mapping for field [alias-field.dynamic-field]. " + + "Existing mapping for [alias-field] must be of type object but found [alias].", exception.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index c50320900923c..b7ee74fb773a0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -45,7 +45,9 @@ public void testDoubleIndexingSameDoc() throws Exception { .endObject().endObject()); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); - DocumentMapper mapper = index.mapperService().documentMapper("type"); + MapperService mapperService = index.mapperService(); + DocumentMapper mapper = mapperService.documentMapper(); + QueryShardContext context = index.newQueryShardContext(0, null, () -> 0L, null); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference @@ -61,7 +63,6 @@ public void testDoubleIndexingSameDoc() throws Exception { assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type") .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); - mapper = index.mapperService().documentMapper("type"); writer.addDocument(doc.rootDoc()); writer.addDocument(doc.rootDoc()); @@ -69,25 +70,25 @@ public void testDoubleIndexingSameDoc() throws Exception { IndexReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); - TopDocs topDocs = searcher.search(mapper.mappers().getMapper("field1").fieldType().termQuery("value1", context), 10); + TopDocs topDocs = searcher.search(mapperService.fullName("field1").termQuery("value1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().getMapper("field2").fieldType().termQuery("1", context), 10); + topDocs = searcher.search(mapperService.fullName("field2").termQuery("1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().getMapper("field3").fieldType().termQuery("1.1", context), 10); + topDocs = searcher.search(mapperService.fullName("field3").termQuery("1.1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().getMapper("field4").fieldType().termQuery("2010-01-01", context), 10); + topDocs = searcher.search(mapperService.fullName("field4").termQuery("2010-01-01", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("1", context), 10); + topDocs = searcher.search(mapperService.fullName("field5").termQuery("1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("2", context), 10); + topDocs = searcher.search(mapperService.fullName("field5").termQuery("2", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = 
searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("3", context), 10); + topDocs = searcher.search(mapperService.fullName("field5").termQuery("3", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); writer.close(); reader.close(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index e26ea4ff3769f..d5419bed239dc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -629,11 +629,11 @@ public void testNumericDetectionEnabled() throws Exception { .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper mapper = defaultMapper.mappers().getMapper("s_long"); - assertThat(mapper.fieldType().typeName(), equalTo("long")); + Mapper mapper = defaultMapper.mappers().getMapper("s_long"); + assertThat(mapper.typeName(), equalTo("long")); mapper = defaultMapper.mappers().getMapper("s_double"); - assertThat(mapper.fieldType().typeName(), equalTo("float")); + assertThat(mapper.typeName(), equalTo("float")); } public void testNumericDetectionDefault() throws Exception { @@ -656,7 +656,7 @@ public void testNumericDetectionDefault() throws Exception { .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get()); defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper mapper = defaultMapper.mappers().getMapper("s_long"); + Mapper mapper = defaultMapper.mappers().getMapper("s_long"); assertThat(mapper, instanceOf(TextFieldMapper.class)); mapper = defaultMapper.mappers().getMapper("s_double"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index d8e8c8e0e3da5..62c764e8060af 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -30,11 +30,11 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.hamcrest.Matchers; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; public class DynamicTemplatesTests extends ESSingleNodeTestCase { public void testMatchTypeOnly() throws Exception { @@ -45,7 +45,9 @@ public void testMatchTypeOnly() throws Exception { .endObject().endObject().endArray().endObject().endObject(); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("person").setSource(builder).get(); - DocumentMapper docMapper = index.mapperService().documentMapper("person"); + + MapperService mapperService = index.mapperService(); + DocumentMapper docMapper = mapperService.documentMapper("person"); builder = JsonXContent.contentBuilder(); builder.startObject().field("s", "hello").field("l", 1).endObject(); ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", BytesReference.bytes(builder), @@ -53,14 +55,11 @@ public void testMatchTypeOnly() throws Exception { 
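
The DoubleIndexingDocTests rewrite above shows the intended query-side pattern: resolve the MappedFieldType once through MapperService#fullName and build term queries from it, rather than re-reading the DocumentMapper after every dynamic-mapping update (the line that refreshed `mapper` is deleted for exactly that reason). A fragment condensed from the test:

    // fullName(...) always reflects the latest merged mapping, so no
    // DocumentMapper refresh is needed after the put-mapping call.
    TopDocs topDocs = searcher.search(
        mapperService.fullName("field1").termQuery("value1", context), 10);
    assertThat(topDocs.totalHits, equalTo(2L));
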
client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); - docMapper = index.mapperService().documentMapper("person"); - DocumentFieldMappers mappers = docMapper.mappers(); - - assertThat(mappers.getMapper("s"), Matchers.notNullValue()); - assertEquals(IndexOptions.NONE, mappers.getMapper("s").fieldType().indexOptions()); + assertThat(mapperService.fullName("s"), notNullValue()); + assertEquals(IndexOptions.NONE, mapperService.fullName("s").indexOptions()); - assertThat(mappers.getMapper("l"), Matchers.notNullValue()); - assertNotSame(IndexOptions.NONE, mappers.getMapper("l").fieldType().indexOptions()); + assertThat(mapperService.fullName("l"), notNullValue()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("l").indexOptions()); } @@ -84,7 +83,7 @@ public void testSimple() throws Exception { assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); assertThat(f.fieldType().tokenized(), equalTo(false)); - FieldMapper fieldMapper = docMapper.mappers().getMapper("name"); + Mapper fieldMapper = docMapper.mappers().getMapper("name"); assertNotNull(fieldMapper); f = doc.getField("multi1"); @@ -143,7 +142,7 @@ public void testSimpleWithXContentTraverse() throws Exception { assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); assertThat(f.fieldType().tokenized(), equalTo(false)); - FieldMapper fieldMapper = docMapper.mappers().getMapper("name"); + Mapper fieldMapper = docMapper.mappers().getMapper("name"); assertNotNull(fieldMapper); f = doc.getField("multi1"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java new file mode 100644 index 0000000000000..9f87ad3d0390a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java @@ -0,0 +1,167 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.junit.Before; + +import java.io.IOException; + +public class FieldAliasMapperTests extends ESSingleNodeTestCase { + private MapperService mapperService; + private DocumentMapperParser parser; + + @Before + public void setup() { + IndexService indexService = createIndex("test"); + mapperService = indexService.mapperService(); + parser = mapperService.documentMapperParser(); + } + + public void testParsing() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .endObject() + .startObject("concrete-field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + } + + public void testParsingWithMissingPath() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .endObject() + .endObject() + .endObject() + .endObject()); + MapperParsingException exception = expectThrows(MapperParsingException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("The [path] property must be specified for field [alias-field].", exception.getMessage()); + } + + public void testParsingWithExtraArgument() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .field("extra-field", "extra-value") + .endObject() + .endObject() + .endObject() + .endObject()); + MapperParsingException exception = expectThrows(MapperParsingException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("Mapping definition for [alias-field] has unsupported parameters: [extra-field : extra-value]", + exception.getMessage()); + } + + public void testMerge() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("first-field") + .field("type", "keyword") + .endObject() + .startObject("alias-field") + .field("type", "alias") + .field("path", "first-field") + .endObject() + .endObject() + .endObject() + .endObject()); + mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + MappedFieldType firstFieldType = mapperService.fullName("alias-field"); + assertEquals("first-field", firstFieldType.name()); + assertTrue(firstFieldType instanceof KeywordFieldMapper.KeywordFieldType); + + String newMapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("second-field") + .field("type", "text") + .endObject() + .startObject("alias-field") + .field("type", "alias") + 
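
The new FieldAliasMapperTests above exercise parser-level validation for the alias type: a path property is mandatory, and unsupported parameters are rejected as for any other mapper. A fragment condensed from testParsingWithMissingPath (the error text is asserted verbatim):

    // An alias mapping without "path" fails to parse with a descriptive error.
    MapperParsingException e = expectThrows(MapperParsingException.class,
        () -> parser.parse("type", new CompressedXContent(mapping)));
    assertEquals("The [path] property must be specified for field [alias-field].",
        e.getMessage());
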
.field("path", "second-field") + .endObject() + .endObject() + .endObject() + .endObject()); + mapperService.merge("type", new CompressedXContent(newMapping), MergeReason.MAPPING_UPDATE); + + MappedFieldType secondFieldType = mapperService.fullName("alias-field"); + assertEquals("second-field", secondFieldType.name()); + assertTrue(secondFieldType instanceof TextFieldMapper.TextFieldType); + } + + public void testMergeFailure() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("concrete-field") + .field("type", "text") + .endObject() + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .endObject() + .endObject() + .endObject() + .endObject()); + mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + String newMapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject()); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> mapperService.merge("type", new CompressedXContent(newMapping), MergeReason.MAPPING_UPDATE)); + assertEquals("Cannot merge a field alias mapping [alias-field] with a mapping that is not for a field alias.", + exception.getMessage()); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index 4f1b908cae84e..6e27823f8a0c0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -28,10 +28,11 @@ import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.Iterator; import java.util.List; +import static java.util.Collections.emptyList; + public class FieldTypeLookupTests extends ESTestCase { public void testEmpty() { @@ -48,7 +49,7 @@ public void testEmpty() { public void testDefaultMapping() { FieldTypeLookup lookup = new FieldTypeLookup(); try { - lookup.copyAndAddAll(MapperService.DEFAULT_MAPPING, Collections.emptyList()); + lookup.copyAndAddAll(MapperService.DEFAULT_MAPPING, emptyList(), emptyList()); fail(); } catch (IllegalArgumentException expected) { assertEquals("Default mappings should not be added to the lookup", expected.getMessage()); @@ -58,7 +59,7 @@ public void testDefaultMapping() { public void testAddNewField() { FieldTypeLookup lookup = new FieldTypeLookup(); MockFieldMapper f = new MockFieldMapper("foo"); - FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f)); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f), emptyList()); assertNull(lookup.get("foo")); assertNull(lookup.get("bar")); assertEquals(f.fieldType(), lookup2.get("foo")); @@ -70,68 +71,203 @@ public void testAddExistingField() { MockFieldMapper f = new MockFieldMapper("foo"); MockFieldMapper f2 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type1", newList(f)); - FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2)); + lookup = lookup.copyAndAddAll("type1", newList(f), emptyList()); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), emptyList()); assertEquals(1, 
size(lookup2.iterator())); assertSame(f.fieldType(), lookup2.get("foo")); assertEquals(f2.fieldType(), lookup2.get("foo")); } - public void testCheckCompatibilityMismatchedTypes() { + public void testMismatchedFieldTypes() { FieldMapper f1 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1), emptyList()); OtherFakeFieldType ft2 = new OtherFakeFieldType(); ft2.setName("foo"); FieldMapper f2 = new MockFieldMapper("foo", ft2); try { - lookup.copyAndAddAll("type2", newList(f2)); + lookup.copyAndAddAll("type2", newList(f2), emptyList()); fail("expected type mismatch"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]")); } } - public void testCheckCompatibilityConflict() { + public void testConflictingFieldTypes() { FieldMapper f1 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1), emptyList()); MappedFieldType ft2 = new MockFieldMapper.FakeFieldType(); ft2.setName("foo"); ft2.setBoost(2.0f); FieldMapper f2 = new MockFieldMapper("foo", ft2); - lookup.copyAndAddAll("type", newList(f2)); // boost is updateable, so ok since we are implicitly updating all types - lookup.copyAndAddAll("type2", newList(f2)); // boost is updateable, so ok if forcing + lookup.copyAndAddAll("type", newList(f2), emptyList()); // boost is updateable, so ok since we are implicitly updating all types + lookup.copyAndAddAll("type2", newList(f2), emptyList()); // boost is updateable, so ok if forcing // now with a non changeable setting MappedFieldType ft3 = new MockFieldMapper.FakeFieldType(); ft3.setName("foo"); ft3.setStored(true); FieldMapper f3 = new MockFieldMapper("foo", ft3); try { - lookup.copyAndAddAll("type2", newList(f3)); + lookup.copyAndAddAll("type2", newList(f3), emptyList()); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("has different [store] values")); } } - public void testSimpleMatchFullNames() { - MockFieldMapper f1 = new MockFieldMapper("foo"); - MockFieldMapper f2 = new MockFieldMapper("bar"); + public void testAddFieldAlias() { + MockFieldMapper field = new MockFieldMapper("foo"); + FieldAliasMapper alias = new FieldAliasMapper("alias", "alias", "foo"); + + FieldTypeLookup lookup = new FieldTypeLookup(); + lookup = lookup.copyAndAddAll("type", newList(field), newList(alias)); + + MappedFieldType aliasType = lookup.get("alias"); + assertEquals(field.fieldType(), aliasType); + } + + public void testUpdateFieldAlias() { + // Add an alias 'alias' to the concrete field 'foo'. + MockFieldMapper.FakeFieldType fieldType1 = new MockFieldMapper.FakeFieldType(); + MockFieldMapper field1 = new MockFieldMapper("foo", fieldType1); + FieldAliasMapper alias1 = new FieldAliasMapper("alias", "alias", "foo"); + + FieldTypeLookup lookup = new FieldTypeLookup(); + lookup = lookup.copyAndAddAll("type", newList(field1), newList(alias1)); + + // Check that the alias refers to 'foo'. + MappedFieldType aliasType1 = lookup.get("alias"); + assertEquals(fieldType1, aliasType1); + + // Update the alias to refer to a new concrete field 'bar'. 
+ MockFieldMapper.FakeFieldType fieldType2 = new MockFieldMapper.FakeFieldType(); + fieldType2.setStored(!fieldType1.stored()); + MockFieldMapper field2 = new MockFieldMapper("bar", fieldType2); + + FieldAliasMapper alias2 = new FieldAliasMapper("alias", "alias", "bar"); + lookup = lookup.copyAndAddAll("type", newList(field2), newList(alias2)); + + // Check that the alias now refers to 'bar'. + MappedFieldType aliasType2 = lookup.get("alias"); + assertEquals(fieldType2, aliasType2); + } + + public void testUpdateConcreteFieldWithAlias() { + // Add an alias 'alias' to the concrete field 'foo'. + FieldAliasMapper alias1 = new FieldAliasMapper("alias", "alias", "foo"); + MockFieldMapper.FakeFieldType fieldType1 = new MockFieldMapper.FakeFieldType(); + fieldType1.setBoost(1.0f); + MockFieldMapper field1 = new MockFieldMapper("foo", fieldType1); + + FieldTypeLookup lookup = new FieldTypeLookup(); + lookup = lookup.copyAndAddAll("type", newList(field1), newList(alias1)); + + // Check that the alias maps to this field type. + MappedFieldType aliasType1 = lookup.get("alias"); + assertEquals(fieldType1, aliasType1); + + // Update the boost for field 'foo'. + MockFieldMapper.FakeFieldType fieldType2 = new MockFieldMapper.FakeFieldType(); + fieldType2.setBoost(2.0f); + MockFieldMapper field2 = new MockFieldMapper("foo", fieldType2); + lookup = lookup.copyAndAddAll("type", newList(field2), emptyList()); + + // Check that the alias maps to the new field type. + MappedFieldType aliasType2 = lookup.get("alias"); + assertEquals(fieldType2, aliasType2); + } + + public void testAliasThatRefersToAlias() { + MockFieldMapper field = new MockFieldMapper("foo"); + FieldAliasMapper alias = new FieldAliasMapper("alias", "alias", "foo"); + FieldTypeLookup lookup = new FieldTypeLookup() + .copyAndAddAll("type", newList(field), newList(alias)); + + FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "alias"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> lookup.copyAndAddAll("type", emptyList(), newList(invalidAlias))); + assertEquals("Invalid [path] value [alias] for field alias [invalid-alias]: an alias" + + " cannot refer to another alias.", e.getMessage()); + } + + public void testAliasThatRefersToItself() { + FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "invalid-alias"); + FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1, f2)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> lookup.copyAndAddAll("type", emptyList(), newList(invalidAlias))); + assertEquals("Invalid [path] value [invalid-alias] for field alias [invalid-alias]: an alias" + + " cannot refer to itself.", e.getMessage()); + } + + public void testAliasWithNonExistentPath() { + FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "non-existent"); + + FieldTypeLookup lookup = new FieldTypeLookup(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> lookup.copyAndAddAll("type", emptyList(), newList(invalidAlias))); + assertEquals("Invalid [path] value [non-existent] for field alias [invalid-alias]: an alias" + + " must refer to an existing field in the mappings.", e.getMessage()); + } + + public void testAddAliasWithPreexistingField() { + MockFieldMapper field = new MockFieldMapper("field"); + FieldTypeLookup lookup = new FieldTypeLookup() + .copyAndAddAll("type", newList(field), 
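
The FieldTypeLookup tests above encode the structural invariants behind those mapper-level checks: copyAndAddAll now takes the alias mappers as a separate argument, an alias resolves eagerly to its target's MappedFieldType (and re-resolves when either the alias path or the concrete field changes), and a path that points at another alias, at itself, or at a missing field is rejected. A fragment condensed from testAddFieldAlias:

    // The alias name resolves to the concrete field's type in the lookup.
    MockFieldMapper field = new MockFieldMapper("foo");
    FieldAliasMapper alias = new FieldAliasMapper("alias", "alias", "foo");
    FieldTypeLookup lookup = new FieldTypeLookup()
        .copyAndAddAll("type", newList(field), newList(alias));
    assertEquals(field.fieldType(), lookup.get("alias"));
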
emptyList()); + + MockFieldMapper invalidField = new MockFieldMapper("invalid"); + FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid", "invalid", "field"); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> lookup.copyAndAddAll("type", newList(invalidField), newList(invalidAlias))); + assertEquals("The name for field alias [invalid] has already been used to define a concrete field.", + e.getMessage()); + } + + public void testAddFieldWithPreexistingAlias() { + MockFieldMapper field = new MockFieldMapper("field"); + FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid", "invalid", "field"); + + FieldTypeLookup lookup = new FieldTypeLookup() + .copyAndAddAll("type", newList(field), newList(invalidAlias)); + + MockFieldMapper invalidField = new MockFieldMapper("invalid"); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> lookup.copyAndAddAll("type", newList(invalidField), emptyList())); + assertEquals("The name for field [invalid] has already been used to define a field alias.", + e.getMessage()); + } + + public void testSimpleMatchToFullName() { + MockFieldMapper field1 = new MockFieldMapper("foo"); + MockFieldMapper field2 = new MockFieldMapper("bar"); + + FieldAliasMapper alias1 = new FieldAliasMapper("food", "food", "foo"); + FieldAliasMapper alias2 = new FieldAliasMapper("barometer", "barometer", "bar"); + + FieldTypeLookup lookup = new FieldTypeLookup(); + lookup = lookup.copyAndAddAll("type", + newList(field1, field2), + newList(alias1, alias2)); + Collection names = lookup.simpleMatchToFullName("b*"); + assertFalse(names.contains("foo")); + assertFalse(names.contains("food")); + assertTrue(names.contains("bar")); + assertTrue(names.contains("barometer")); } public void testIteratorImmutable() { MockFieldMapper f1 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1), emptyList()); try { Iterator itr = lookup.iterator(); @@ -144,7 +280,11 @@ public void testIteratorImmutable() { } } - static List newList(FieldMapper... mapper) { + private static List newList(FieldMapper... mapper) { + return Arrays.asList(mapper); + } + + private static List newList(FieldAliasMapper... 
mapper) { return Arrays.asList(mapper); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java index 57a6173bc657d..6999e39b70a7d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java @@ -23,10 +23,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; @@ -38,13 +35,14 @@ public void testSimple() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json"); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); - DocumentMapper docMapper = index.mapperService().documentMapper("person"); + + MapperService mapperService = index.mapperService(); + byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-data.json"); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), - XContentType.JSON)); + ParsedDocument parsedDoc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", new BytesArray(json), XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); - docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); @@ -52,8 +50,8 @@ public void testSimple() throws Exception { assertThat(f.stringValue(), equalTo("some name")); assertThat(f.fieldType().stored(), equalTo(true)); - FieldMapper fieldMapper = docMapper.mappers().getMapper("name"); - assertThat(fieldMapper.fieldType().stored(), equalTo(true)); + MappedFieldType fieldType = mapperService.fullName("name"); + assertThat(fieldType.stored(), equalTo(true)); boolean stored = false; for (IndexableField field : doc.getFields("age")) { @@ -61,7 +59,7 @@ public void testSimple() throws Exception { } assertTrue(stored); - fieldMapper = docMapper.mappers().getMapper("age"); - assertThat(fieldMapper.fieldType().stored(), equalTo(true)); + fieldType = mapperService.fullName("age"); + assertThat(fieldType.stored(), equalTo(true)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index facafaf180ec2..eabf0a849fa39 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -287,7 +287,7 @@ public void testIgnoreZValue() throws IOException { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new 
CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class)); boolean ignoreZValue = ((GeoPointFieldMapper)fieldMapper).ignoreZValue().value(); @@ -364,10 +364,10 @@ public void testNullValue() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class)); - Object nullValue = fieldMapper.fieldType().nullValue(); + Object nullValue = ((GeoPointFieldMapper) fieldMapper).fieldType().nullValue(); assertThat(nullValue, equalTo(new GeoPoint(1, 2))); ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index 7ff8c28f6dc63..4c947a44a0a12 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -59,7 +59,7 @@ public void testDefaultConfiguration() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -83,7 +83,7 @@ public void testOrientationParsing() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); ShapeBuilder.Orientation orientation = ((GeoShapeFieldMapper)fieldMapper).fieldType().orientation(); @@ -121,7 +121,7 @@ public void testCoerceParsing() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); boolean coerce = ((GeoShapeFieldMapper)fieldMapper).coerce().value(); @@ -157,7 +157,7 @@ public void testIgnoreZValue() throws IOException { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); boolean ignoreZValue = ((GeoShapeFieldMapper)fieldMapper).ignoreZValue().value(); @@ -191,7 +191,7 @@ public 
void testIgnoreMalformedParsing() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); Explicit ignoreMalformed = ((GeoShapeFieldMapper)fieldMapper).ignoreMalformed(); @@ -225,7 +225,7 @@ public void testGeohashConfiguration() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -248,7 +248,7 @@ public void testQuadtreeConfiguration() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -276,7 +276,7 @@ public void testLevelPrecisionConfiguration() throws IOException { DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -300,7 +300,7 @@ public void testLevelPrecisionConfiguration() throws IOException { DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -326,7 +326,7 @@ public void testLevelPrecisionConfiguration() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -350,7 +350,7 @@ public void testLevelPrecisionConfiguration() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -373,7 +373,7 @@ public void testLevelPrecisionConfiguration() throws IOException 
{ .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -395,7 +395,7 @@ public void testPointsOnlyOption() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -418,7 +418,7 @@ public void testLevelDefaults() throws IOException { DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -440,7 +440,7 @@ public void testLevelDefaults() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -475,7 +475,7 @@ public void testGeoShapeMapperMerge() throws Exception { } // verify nothing changed - FieldMapper fieldMapper = docMapper.mappers().getMapper("shape"); + Mapper fieldMapper = docMapper.mappers().getMapper("shape"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -600,7 +600,7 @@ public void testPointsOnlyDefaultsWithTermStrategy() throws IOException { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java index 32084c50310fc..8060c0a3f92c8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java @@ -38,69 +38,68 @@ public void testMergeMultiField() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), 
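
The long run of mechanical edits in the geo mapper tests above reflects a signature change: DocumentFieldMappers#getMapper now returns the broader Mapper type, so the lookup can also hand back FieldAliasMapper entries, and call sites that need type-specific accessors first check and cast to the concrete mapper class. A fragment condensed from the GeoPointFieldMapperTests null-value case:

    // getMapper(...) returns Mapper; cast after the instanceOf check to reach
    // mapper-specific accessors such as fieldType().nullValue().
    Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
    assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class));
    Object nullValue = ((GeoPointFieldMapper) fieldMapper).fieldType().nullValue();
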
MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); + assertThat(mapperService.fullName("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); - Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), nullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), nullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), nullValue()); - doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + doc = mapperService.documentMapper().parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); + 
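
The JavaMultiFieldMergeTests rewrite follows the same recipe as the earlier files: the DocumentMapper returned by merge is no longer captured, and every assertion goes through MapperService#fullName, which tracks the latest merged mapping (the duplicated assertNotSame lines in the old version disappear as part of the cleanup). One merge step, condensed:

    // Merge an update, then assert through the live MapperService view.
    mapperService.merge("person", new CompressedXContent(mapping),
        MapperService.MergeReason.MAPPING_UPDATE);
    assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions());
    assertThat(mapperService.fullName("name.indexed"), notNullValue());
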
assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), notNullValue()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), notNullValue()); } public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); + assertThat(mapperService.fullName("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); - Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); @@ -108,32 +107,31 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); - assertNotSame(IndexOptions.NONE, 
docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), nullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), nullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), nullValue()); - doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + doc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json"); @@ -146,10 +144,10 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { } // There are conflicts, so the `name.not_indexed3` has not been added - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), nullValue()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java new file mode 100644 index 
0000000000000..af17918baacb9
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.index.mapper;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static java.util.Collections.emptyList;
+import static java.util.Collections.emptyMap;
+import static java.util.Collections.singletonList;
+
+public class MapperMergeValidatorTests extends ESTestCase {
+
+    public void testDuplicateFieldAliasAndObject() {
+        ObjectMapper objectMapper = createObjectMapper("some.path");
+        FieldAliasMapper aliasMapper = new FieldAliasMapper("path", "some.path", "field");
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
+            MapperMergeValidator.validateMapperStructure("type",
+                singletonList(objectMapper),
+                emptyList(),
+                singletonList(aliasMapper),
+                emptyMap(),
+                new FieldTypeLookup()));
+        assertEquals("Field [some.path] is defined both as an object and a field in [type]", e.getMessage());
+    }
+
+    public void testFieldAliasWithNestedScope() {
+        ObjectMapper objectMapper = createNestedObjectMapper("nested");
+        FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "nested.alias", "nested.field");
+
+        MapperMergeValidator.validateFieldReferences(emptyList(),
+            singletonList(aliasMapper),
+            Collections.singletonMap("nested", objectMapper),
+            new FieldTypeLookup());
+    }
+
+    public void testFieldAliasWithDifferentObjectScopes() {
+        Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>();
+        fullPathObjectMappers.put("object1", createObjectMapper("object1"));
+        fullPathObjectMappers.put("object2", createObjectMapper("object2"));
+
+        FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "object2.alias", "object1.field");
+
+        MapperMergeValidator.validateFieldReferences(emptyList(),
+            singletonList(aliasMapper),
+            fullPathObjectMappers,
+            new FieldTypeLookup());
+    }
+
+    public void testFieldAliasWithNestedTarget() {
+        ObjectMapper objectMapper = createNestedObjectMapper("nested");
+        FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "alias", "nested.field");
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
+            MapperMergeValidator.validateFieldReferences(emptyList(),
+                singletonList(aliasMapper),
+                Collections.singletonMap("nested", objectMapper),
+                new FieldTypeLookup()));
+
+        String expectedMessage = "Invalid [path] value [nested.field] for field alias [alias]: " +
+            "an alias must have the same nested scope as its target. The alias is not nested, " +
+            "but the target's nested scope is [nested].";
+        assertEquals(expectedMessage, e.getMessage());
+    }
+
+    public void testFieldAliasWithDifferentNestedScopes() {
+        Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>();
+        fullPathObjectMappers.put("nested1", createNestedObjectMapper("nested1"));
+        fullPathObjectMappers.put("nested2", createNestedObjectMapper("nested2"));
+
+        FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "nested2.alias", "nested1.field");
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
+            MapperMergeValidator.validateFieldReferences(emptyList(),
+                singletonList(aliasMapper),
+                fullPathObjectMappers,
+                new FieldTypeLookup()));
+
+        String expectedMessage = "Invalid [path] value [nested1.field] for field alias [nested2.alias]: " +
+            "an alias must have the same nested scope as its target. The alias's nested scope is [nested2], " +
+            "but the target's nested scope is [nested1].";
+        assertEquals(expectedMessage, e.getMessage());
+    }
+
+    private static ObjectMapper createObjectMapper(String name) {
+        return new ObjectMapper(name, name, true,
+            ObjectMapper.Nested.NO,
+            ObjectMapper.Dynamic.FALSE, emptyMap(), Settings.EMPTY);
+    }
+
+    private static ObjectMapper createNestedObjectMapper(String name) {
+        return new ObjectMapper(name, name, true,
+            ObjectMapper.Nested.newNested(false, false),
+            ObjectMapper.Dynamic.FALSE, emptyMap(), Settings.EMPTY);
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
index 6bccb7106f656..20e0dd4639c3a 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
@@ -235,6 +235,41 @@ public void testIndexSortWithNestedFields() throws IOException {
             containsString("cannot have nested fields when index sort is activated"));
     }
 
+    public void testFieldAliasWithMismatchedNestedScope() throws Throwable {
+        IndexService indexService = createIndex("test");
+        MapperService mapperService = indexService.mapperService();
+
+        CompressedXContent mapping = new CompressedXContent(BytesReference.bytes(
+            XContentFactory.jsonBuilder().startObject()
+                .startObject("properties")
+                    .startObject("nested")
+                        .field("type", "nested")
+                        .startObject("properties")
+                            .startObject("field")
+                                .field("type", "text")
+                            .endObject()
+                        .endObject()
+                    .endObject()
+                .endObject()
+            .endObject()));
+
+        mapperService.merge("type", mapping, MergeReason.MAPPING_UPDATE);
+
+        CompressedXContent mappingUpdate = new CompressedXContent(BytesReference.bytes(
+            XContentFactory.jsonBuilder().startObject()
+                .startObject("properties")
+                    .startObject("alias")
+                        .field("type", "alias")
+                        .field("path", "nested.field")
+                    .endObject()
+                .endObject()
+            .endObject()));
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+            () -> mapperService.merge("type", mappingUpdate, MergeReason.MAPPING_UPDATE));
+        assertThat(e.getMessage(), containsString("Invalid [path] value [nested.field] for field alias [alias]"));
+    }
+
     public void testForbidMultipleTypes() throws IOException {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
         MapperService mapperService = createIndex("test").mapperService();
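For context, the mapping shape these validation tests exercise looks roughly like the following. This is an illustrative sketch, not part of the patch: the field names are invented, and the alias syntax (a field of type "alias" whose "path" names its target) is the same one used by the SpanMultiTermQueryBuilderTests change later in this diff. Because the alias and its target both live under the same nested object, validateFieldReferences accepts it:

    // Sketch only: an alias declared in the same nested scope as its target.
    // All field names here are hypothetical.
    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
        .startObject("properties")
            .startObject("nested")
                .field("type", "nested")
                .startObject("properties")
                    .startObject("field")
                        .field("type", "text")
                    .endObject()
                    .startObject("alias")
                        .field("type", "alias")
                        .field("path", "nested.field")
                    .endObject()
                .endObject()
            .endObject()
        .endObject()
        .endObject();

Moving the alias to the top level while leaving the target under "nested" is exactly the mismatch that testFieldAliasWithNestedTarget and testFieldAliasWithMismatchedNestedScope reject.

diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java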
b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java index 45bb8090206bf..0f3d5193c285b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.ParseContext.Document; +import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; @@ -55,9 +56,15 @@ public void testMultiFieldMultiFields() throws Exception { } private void testMultiField(String mapping) throws Exception { - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); + IndexService indexService = createIndex("test"); + MapperService mapperService = indexService.mapperService(); + + indexService.mapperService().merge("person", new CompressedXContent(mapping), + MapperService.MergeReason.MAPPING_UPDATE); + BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json")); - Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f.name(), equalTo("name")); @@ -84,37 +91,37 @@ private void testMultiField(String mapping) throws Exception { assertThat(f.name(), equalTo("object1.multi1.string")); assertThat(f.binaryValue(), equalTo(new BytesRef("2010-01-01"))); - assertThat(docMapper.mappers().getMapper("name"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name"), instanceOf(TextFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name").fieldType().stored(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name").fieldType().tokenized(), equalTo(true)); - - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.indexed"), instanceOf(TextFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name.indexed").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().tokenized(), equalTo(true)); - - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), instanceOf(TextFieldMapper.class)); - assertEquals(IndexOptions.NONE, docMapper.mappers().getMapper("name.not_indexed").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().stored(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().tokenized(), equalTo(true)); - - assertThat(docMapper.mappers().getMapper("name.test1"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.test1"), instanceOf(TextFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name.test1").fieldType().indexOptions()); - 
assertThat(docMapper.mappers().getMapper("name.test1").fieldType().stored(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.test1").fieldType().tokenized(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.test1").fieldType().eagerGlobalOrdinals(), equalTo(true)); - - assertThat(docMapper.mappers().getMapper("object1.multi1"), notNullValue()); - assertThat(docMapper.mappers().getMapper("object1.multi1"), instanceOf(DateFieldMapper.class)); - assertThat(docMapper.mappers().getMapper("object1.multi1.string"), notNullValue()); - assertThat(docMapper.mappers().getMapper("object1.multi1.string"), instanceOf(KeywordFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("object1.multi1.string").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("object1.multi1.string").fieldType().tokenized(), equalTo(false)); + assertThat(mapperService.fullName("name"), notNullValue()); + assertThat(mapperService.fullName("name"), instanceOf(TextFieldType.class)); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); + assertThat(mapperService.fullName("name").stored(), equalTo(true)); + assertThat(mapperService.fullName("name").tokenized(), equalTo(true)); + + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name"), instanceOf(TextFieldType.class)); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name.indexed").indexOptions()); + assertThat(mapperService.fullName("name.indexed").stored(), equalTo(false)); + assertThat(mapperService.fullName("name.indexed").tokenized(), equalTo(true)); + + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name"), instanceOf(TextFieldType.class)); + assertEquals(IndexOptions.NONE, mapperService.fullName("name.not_indexed").indexOptions()); + assertThat(mapperService.fullName("name.not_indexed").stored(), equalTo(true)); + assertThat(mapperService.fullName("name.not_indexed").tokenized(), equalTo(true)); + + assertThat(mapperService.fullName("name.test1"), notNullValue()); + assertThat(mapperService.fullName("name"), instanceOf(TextFieldType.class)); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name.test1").indexOptions()); + assertThat(mapperService.fullName("name.test1").stored(), equalTo(true)); + assertThat(mapperService.fullName("name.test1").tokenized(), equalTo(true)); + assertThat(mapperService.fullName("name.test1").eagerGlobalOrdinals(), equalTo(true)); + + assertThat(mapperService.fullName("object1.multi1"), notNullValue()); + assertThat(mapperService.fullName("object1.multi1"), instanceOf(DateFieldMapper.DateFieldType.class)); + assertThat(mapperService.fullName("object1.multi1.string"), notNullValue()); + assertThat(mapperService.fullName("object1.multi1.string"), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + assertNotSame(IndexOptions.NONE, mapperService.fullName("object1.multi1.string").indexOptions()); + assertThat(mapperService.fullName("object1.multi1.string").tokenized(), equalTo(false)); } public void testBuildThenParse() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/PathMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/PathMapperTests.java index 271501281cd99..6bb15432b1fd3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/PathMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/PathMapperTests.java @@ 
-20,7 +20,6 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java index 3ad53513c5185..9546fb5136e31 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java @@ -23,10 +23,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; @@ -38,13 +35,14 @@ public void testSimple() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json"); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); - DocumentMapper docMapper = index.mapperService().documentMapper("person"); + + MapperService mapperService = index.mapperService(); + byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-data.json"); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), - XContentType.JSON)); + ParsedDocument parsedDoc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", new BytesArray(json), XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); - docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); @@ -52,26 +50,26 @@ public void testSimple() throws Exception { assertThat(f.stringValue(), equalTo("top_level")); assertThat(f.fieldType().stored(), equalTo(false)); - FieldMapper fieldMapper = docMapper.mappers().getMapper("name"); - assertThat(fieldMapper.fieldType().stored(), equalTo(false)); + MappedFieldType fieldType = mapperService.fullName("name"); + assertThat(fieldType.stored(), equalTo(false)); f = doc.getField("obj1.name"); assertThat(f.name(), equalTo("obj1.name")); assertThat(f.fieldType().stored(), equalTo(true)); - fieldMapper = docMapper.mappers().getMapper("obj1.name"); - assertThat(fieldMapper.fieldType().stored(), equalTo(true)); + fieldType = mapperService.fullName("obj1.name"); + assertThat(fieldType.stored(), equalTo(true)); f = doc.getField("obj1.obj2.name"); assertThat(f.name(), equalTo("obj1.obj2.name")); assertThat(f.fieldType().stored(), equalTo(false)); - fieldMapper = docMapper.mappers().getMapper("obj1.obj2.name"); - assertThat(fieldMapper.fieldType().stored(), equalTo(false)); + fieldType = mapperService.fullName("obj1.obj2.name"); + assertThat(fieldType.stored(), equalTo(false)); // verify more complex path_match expressions - 
fieldMapper = docMapper.mappers().getMapper("obj3.obj4.prop1"); - assertNotNull(fieldMapper); + fieldType = mapperService.fullName("obj3.obj4.prop1"); + assertNotNull(fieldType); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index 95ffc373e6b8f..a07192df8047f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -28,13 +28,14 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.Collections; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; @@ -84,9 +85,11 @@ public void testBytesAndNumericRepresentation() throws Exception { DirectoryReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); - CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor( - Collections.emptySet(), Collections.singletonList("field*"), false); + Set fieldNames = Sets.newHashSet("field1", "field2", "field3", "field4", "field5", + "field6", "field7", "field8", "field9", "field10"); + CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(fieldNames, false); searcher.doc(0, fieldsVisitor); + fieldsVisitor.postProcess(mapperService); assertThat(fieldsVisitor.fields().size(), equalTo(10)); assertThat(fieldsVisitor.fields().get("field1").size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index e3dc8ff0b78fc..4736cbe471289 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -490,9 +490,10 @@ public void testEagerGlobalOrdinals() throws IOException { .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); - assertTrue(mapper.mappers().getMapper("field").fieldType().eagerGlobalOrdinals()); + + FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + assertTrue(fieldMapper.fieldType().eagerGlobalOrdinals()); } public void testFielddata() throws IOException { @@ -504,8 +505,10 @@ public void testFielddata() throws IOException { DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, disabledMapper.mappingSource().toString()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> disabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder("test")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + FieldMapper fieldMapper = (FieldMapper) disabledMapper.mappers().getMapper("field"); + fieldMapper.fieldType().fielddataBuilder("test"); + }); assertThat(e.getMessage(), containsString("Fielddata is disabled")); mapping 
= Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") @@ -518,7 +521,9 @@ public void testFielddata() throws IOException { DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, enabledMapper.mappingSource().toString()); - enabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder("test"); // no exception this time + + FieldMapper enabledFieldMapper = (FieldMapper) enabledMapper.mappers().getMapper("field"); + enabledFieldMapper.fieldType().fielddataBuilder("test"); // no exception this time String illegalMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") @@ -547,7 +552,9 @@ public void testFrequencyFilter() throws IOException { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - TextFieldType fieldType = (TextFieldType) mapper.mappers().getMapper("field").fieldType(); + TextFieldMapper fieldMapper = (TextFieldMapper) mapper.mappers().getMapper("field"); + TextFieldType fieldType = fieldMapper.fieldType(); + assertThat(fieldType.fielddataMinFrequency(), equalTo(2d)); assertThat(fieldType.fielddataMaxFrequency(), equalTo((double) Integer.MAX_VALUE)); assertThat(fieldType.fielddataMinSegmentSize(), equalTo(1000)); @@ -630,7 +637,7 @@ public void testIndexPrefixIndexTypes() throws IOException { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, ft.indexOptions()); } @@ -646,7 +653,7 @@ public void testIndexPrefixIndexTypes() throws IOException { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; assertEquals(IndexOptions.DOCS, ft.indexOptions()); assertFalse(ft.storeTermVectors()); @@ -663,7 +670,7 @@ public void testIndexPrefixIndexTypes() throws IOException { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions()); @@ -684,7 +691,7 @@ public void testIndexPrefixIndexTypes() throws IOException { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions()); @@ -705,7 +712,7 @@ public void testIndexPrefixIndexTypes() throws IOException { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper prefix = 
mapper.mappers().getMapper("field._index_prefix"); + FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions()); @@ -836,10 +843,13 @@ public void testIndexPrefixMapping() throws IOException { assertThat(mapper.mappers().getMapper("field._index_prefix").toString(), containsString("prefixChars=1:10")); - Query q = mapper.mappers().getMapper("field").fieldType().prefixQuery("goin", CONSTANT_SCORE_REWRITE, queryShardContext); + FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + MappedFieldType fieldType = fieldMapper.fieldType; + + Query q = fieldType.prefixQuery("goin", CONSTANT_SCORE_REWRITE, queryShardContext); + assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field._index_prefix", "goin"))), q); - q = mapper.mappers().getMapper("field").fieldType().prefixQuery("internationalisatio", - CONSTANT_SCORE_REWRITE, queryShardContext); + q = fieldType.prefixQuery("internationalisatio", CONSTANT_SCORE_REWRITE, queryShardContext); assertEquals(new PrefixQuery(new Term("field", "internationalisatio")), q); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference @@ -864,17 +874,16 @@ public void testIndexPrefixMapping() throws IOException { CompressedXContent json = new CompressedXContent(mapping); DocumentMapper mapper = parser.parse("type", json); - Query q1 = mapper.mappers().getMapper("field").fieldType().prefixQuery("g", - CONSTANT_SCORE_REWRITE, queryShardContext); + FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + MappedFieldType fieldType = fieldMapper.fieldType; + + Query q1 = fieldType.prefixQuery("g", CONSTANT_SCORE_REWRITE, queryShardContext); assertThat(q1, instanceOf(PrefixQuery.class)); - Query q2 = mapper.mappers().getMapper("field").fieldType().prefixQuery("go", - CONSTANT_SCORE_REWRITE, queryShardContext); + Query q2 = fieldType.prefixQuery("go", CONSTANT_SCORE_REWRITE, queryShardContext); assertThat(q2, instanceOf(ConstantScoreQuery.class)); - Query q5 = mapper.mappers().getMapper("field").fieldType().prefixQuery("going", - CONSTANT_SCORE_REWRITE, queryShardContext); + Query q5 = fieldType.prefixQuery("going", CONSTANT_SCORE_REWRITE, queryShardContext); assertThat(q5, instanceOf(ConstantScoreQuery.class)); - Query q6 = mapper.mappers().getMapper("field").fieldType().prefixQuery("goings", - CONSTANT_SCORE_REWRITE, queryShardContext); + Query q6 = fieldType.prefixQuery("goings", CONSTANT_SCORE_REWRITE, queryShardContext); assertThat(q6, instanceOf(PrefixQuery.class)); } diff --git a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java index 5478e172fedb7..fe39345dadddd 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; @@ -27,6 +28,7 @@ import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; import static 
org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; @@ -39,19 +41,16 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase getAlternateVersions() { protected void doAssertLuceneQuery(CommonTermsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { assertThat(query, instanceOf(ExtendedCommonTermsQuery.class)); ExtendedCommonTermsQuery extendedCommonTermsQuery = (ExtendedCommonTermsQuery) query; + + List terms = extendedCommonTermsQuery.getTerms(); + if (!terms.isEmpty()) { + String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); + String actualFieldName = terms.iterator().next().field(); + assertThat(actualFieldName, equalTo(expectedFieldName)); + } + assertThat(extendedCommonTermsQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo(queryBuilder.highFreqMinimumShouldMatch())); assertThat(extendedCommonTermsQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo(queryBuilder.lowFreqMinimumShouldMatch())); } diff --git a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index d4547eee26f89..88742e08554ff 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -76,7 +76,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, if (fields.size() == 1) { assertThat(query, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; - String field = fields.iterator().next(); + String field = expectedFieldName(fields.iterator().next()); assertThat(constantScoreQuery.getQuery(), instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) constantScoreQuery.getQuery(); assertEquals(field, termQuery.getTerm().text()); @@ -99,7 +99,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, } else if (fields.size() == 1) { assertThat(query, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; - String field = fields.iterator().next(); + String field = expectedFieldName(fields.iterator().next()); if (context.getQueryShardContext().getObjectMapper(field) != null) { assertThat(constantScoreQuery.getQuery(), instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) constantScoreQuery.getQuery(); diff --git a/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java index d7fe00e730c91..9d98a12358f28 100644 --- a/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java @@ -21,7 +21,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; @@ -45,11 +44,7 @@ protected FieldMaskingSpanQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(FieldMaskingSpanQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { - String fieldInQuery = queryBuilder.fieldName(); - MappedFieldType fieldType = 
context.getQueryShardContext().fieldMapper(fieldInQuery); - if (fieldType != null) { - fieldInQuery = fieldType.name(); - } + String fieldInQuery = expectedFieldName(queryBuilder.fieldName()); assertThat(query, instanceOf(FieldMaskingSpanQuery.class)); FieldMaskingSpanQuery fieldMaskingSpanQuery = (FieldMaskingSpanQuery) query; assertThat(fieldMaskingSpanQuery.getField(), equalTo(fieldInQuery)); diff --git a/server/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java index 4fae80d09a51e..0a67ab50ae8ca 100644 --- a/server/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java @@ -41,7 +41,8 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase getAlternateVersions() { @Override protected void doAssertLuceneQuery(FuzzyQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { assertThat(query, instanceOf(FuzzyQuery.class)); + + FuzzyQuery fuzzyQuery = (FuzzyQuery) query; + String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); + String actualFieldName = fuzzyQuery.getTerm().field(); + assertThat(actualFieldName, equalTo(expectedFieldName)); } public void testIllegalArguments() { diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index d1f7972c7f441..d0d075af21a27 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -39,6 +39,7 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase { /** Randomly generate either NaN or one of the two infinity values. 
*/ @@ -46,7 +47,8 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase { @Override protected GeoPolygonQueryBuilder doCreateTestQueryBuilder() { + String fieldName = randomFrom(GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME); List polygon = randomPolygon(); - GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, polygon); + GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(fieldName, polygon); if (randomBoolean()) { builder.setValidationMethod(randomFrom(GeoValidationMethod.values())); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java index e5da5d7f97146..694b63b141b82 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java @@ -42,13 +42,13 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase { @Override protected MatchPhrasePrefixQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(STRING_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, - DOUBLE_FIELD_NAME, DATE_FIELD_NAME); + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, + INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME); if (fieldName.equals(DATE_FIELD_NAME)) { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); } Object value; - if (fieldName.equals(STRING_FIELD_NAME)) { + if (isTextField(fieldName)) { int terms = randomIntBetween(0, 3); StringBuilder builder = new StringBuilder(); for (int i = 0; i < terms; i++) { @@ -61,7 +61,7 @@ protected MatchPhrasePrefixQueryBuilder doCreateTestQueryBuilder() { MatchPhrasePrefixQueryBuilder matchQuery = new MatchPhrasePrefixQueryBuilder(fieldName, value); - if (randomBoolean() && fieldName.equals(STRING_FIELD_NAME)) { + if (randomBoolean() && isTextField(fieldName)) { matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace")); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java index fb03d54aeff8c..e59c5d6e0c4c0 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java @@ -45,13 +45,13 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase { @Override protected MatchPhraseQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(STRING_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, - DOUBLE_FIELD_NAME, DATE_FIELD_NAME); + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, + INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME); if (fieldName.equals(DATE_FIELD_NAME)) { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); } Object value; - if (fieldName.equals(STRING_FIELD_NAME)) { + if (isTextField(fieldName)) { int terms = randomIntBetween(0, 3); StringBuilder builder = new StringBuilder(); for (int i = 0; i < terms; i++) { @@ -64,7 +64,7 @@ protected MatchPhraseQueryBuilder doCreateTestQueryBuilder() { MatchPhraseQueryBuilder matchQuery = new MatchPhraseQueryBuilder(fieldName, value); - if (randomBoolean() && 
fieldName.equals(STRING_FIELD_NAME)) {
+        if (randomBoolean() && isTextField(fieldName)) {
             matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace"));
         }
diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java
index 05a4b99f19d1a..7d72ad9a4e16e 100644
--- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.index.query;
 
+import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.ExtendedCommonTermsQuery;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
@@ -47,6 +48,7 @@
 import java.io.IOException;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 
@@ -59,13 +61,13 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> {
     @Override
     protected MatchQueryBuilder doCreateTestQueryBuilder() {
-        String fieldName = randomFrom(STRING_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME,
-            DOUBLE_FIELD_NAME, DATE_FIELD_NAME);
+        String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME,
+            INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME);
         if (fieldName.equals(DATE_FIELD_NAME)) {
             assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
         }
         Object value;
-        if (fieldName.equals(STRING_FIELD_NAME)) {
+        if (isTextField(fieldName)) {
             int terms = randomIntBetween(0, 3);
             StringBuilder builder = new StringBuilder();
             for (int i = 0; i < terms; i++) {
@@ -79,11 +81,11 @@ protected MatchQueryBuilder doCreateTestQueryBuilder() {
         MatchQueryBuilder matchQuery = new MatchQueryBuilder(fieldName, value);
         matchQuery.operator(randomFrom(Operator.values()));
 
-        if (randomBoolean() && fieldName.equals(STRING_FIELD_NAME)) {
+        if (randomBoolean() && isTextField(fieldName)) {
             matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace"));
         }
 
-        if (fieldName.equals(STRING_FIELD_NAME) && randomBoolean()) {
+        if (isTextField(fieldName) && randomBoolean()) {
             matchQuery.fuzziness(randomFuzziness(fieldName));
         }
 
@@ -179,6 +181,12 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query,
         if (query instanceof ExtendedCommonTermsQuery) {
             assertTrue(queryBuilder.cutoffFrequency() != null);
             ExtendedCommonTermsQuery ectq = (ExtendedCommonTermsQuery) query;
+            List<Term> terms = ectq.getTerms();
+            if (!terms.isEmpty()) {
+                Term term = terms.iterator().next();
+                String expectedFieldName = expectedFieldName(queryBuilder.fieldName());
+                assertThat(term.field(), equalTo(expectedFieldName));
+            }
             assertEquals(queryBuilder.cutoffFrequency(), ectq.getMaxTermFrequency(), Float.MIN_VALUE);
         }
 
@@ -195,6 +203,9 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query,
             termLcMatcher = either(termLcMatcher).or(equalTo(originalTermLc.substring(0, 1)));
         }
         assertThat(actualTermLc, termLcMatcher);
+
+        String expectedFieldName = expectedFieldName(queryBuilder.fieldName());
+        assertThat(expectedFieldName, equalTo(fuzzyQuery.getTerm().field()));
         assertThat(queryBuilder.prefixLength(), equalTo(fuzzyQuery.getPrefixLength()));
         assertThat(queryBuilder.fuzzyTranspositions(), equalTo(fuzzyQuery.getTranspositions()));
     }
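The field-name checks added to doAssertLuceneQuery above, and in most of the query builder tests in this diff, lean on the test framework's expectedFieldName(...) helper: a query built against an alias produces Lucene terms on the alias's target field, so assertions must compare the resolved name rather than queryBuilder.fieldName() directly. Condensed, the recurring pattern looks like this (a sketch only, with names borrowed from the surrounding tests and a TermQuery standing in for whatever query type the test at hand builds):

    // The built Lucene query always refers to the alias target, so resolve
    // the expected concrete field name before asserting on the term.
    String expected = expectedFieldName(queryBuilder.fieldName());
    Term term = ((TermQuery) query).getTerm();
    assertThat(term.field(), equalTo(expected));

diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java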
b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index de044d5879312..b54ce571453b3 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -91,7 +91,7 @@ public void setup() { } private static String[] randomStringFields() { - String[] mappedStringFields = new String[]{STRING_FIELD_NAME, STRING_FIELD_NAME_2}; + String[] mappedStringFields = new String[]{STRING_FIELD_NAME, STRING_FIELD_NAME_2, STRING_ALIAS_FIELD_NAME}; String[] unmappedStringFields = generateRandomStringArray(2, 5, false, false); return Stream.concat(Arrays.stream(mappedStringFields), Arrays.stream(unmappedStringFields)).toArray(String[]::new); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index b6d4816b01f4a..9eed4d9f1c1df 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -230,9 +230,10 @@ public void testToQueryFieldsWildcard() throws Exception { assertThat(query, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; assertThat(dQuery.getTieBreakerMultiplier(), equalTo(1.0f)); - assertThat(dQuery.getDisjuncts().size(), equalTo(2)); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); + assertThat(dQuery.getDisjuncts().size(), equalTo(3)); + assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); + assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); + assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 2).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); } public void testToQueryFieldMissing() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java index 11cf639974557..c78c83c154ff2 100644 --- a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java @@ -60,7 +60,9 @@ protected Map getAlternateVersions() { } private static PrefixQueryBuilder randomPrefixQuery() { - String fieldName = randomBoolean() ? 
STRING_FIELD_NAME : randomAlphaOfLengthBetween(1, 10); + String fieldName = randomFrom(STRING_FIELD_NAME, + STRING_ALIAS_FIELD_NAME, + randomAlphaOfLengthBetween(1, 10)); String value = randomAlphaOfLengthBetween(1, 10); return new PrefixQueryBuilder(fieldName, value); } @@ -69,7 +71,9 @@ private static PrefixQueryBuilder randomPrefixQuery() { protected void doAssertLuceneQuery(PrefixQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { assertThat(query, instanceOf(PrefixQuery.class)); PrefixQuery prefixQuery = (PrefixQuery) query; - assertThat(prefixQuery.getPrefix().field(), equalTo(queryBuilder.fieldName())); + + String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); + assertThat(prefixQuery.getPrefix().field(), equalTo(expectedFieldName)); assertThat(prefixQuery.getPrefix().text(), equalTo(queryBuilder.value())); } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 1c8bc48fb7ef3..4b9e0f5a66ea8 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -90,12 +90,16 @@ protected QueryStringQueryBuilder doCreateTestQueryBuilder() { } QueryStringQueryBuilder queryStringQueryBuilder = new QueryStringQueryBuilder(query); if (randomBoolean()) { - queryStringQueryBuilder.defaultField(randomBoolean() ? - STRING_FIELD_NAME : randomAlphaOfLengthBetween(1, 10)); + String defaultFieldName = randomFrom(STRING_FIELD_NAME, + STRING_ALIAS_FIELD_NAME, + randomAlphaOfLengthBetween(1, 10)); + queryStringQueryBuilder.defaultField(defaultFieldName); } else { int numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { - String fieldName = randomBoolean() ? 
STRING_FIELD_NAME : randomAlphaOfLengthBetween(1, 10); + String fieldName = randomFrom(STRING_FIELD_NAME, + STRING_ALIAS_FIELD_NAME, + randomAlphaOfLengthBetween(1, 10)); if (randomBoolean()) { queryStringQueryBuilder.field(fieldName); } else { @@ -508,10 +512,12 @@ public void testToQueryFieldsWildcard() throws Exception { Query query = queryStringQuery("test").field("mapped_str*").toQuery(createShardContext()); assertThat(query, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; - assertThat(dQuery.getDisjuncts().size(), equalTo(2)); + assertThat(dQuery.getDisjuncts().size(), equalTo(3)); assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), - equalTo(new Term(STRING_FIELD_NAME_2, "test"))); + equalTo(new Term(STRING_FIELD_NAME, "test"))); assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), + equalTo(new Term(STRING_FIELD_NAME_2, "test"))); + assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 2).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); } diff --git a/server/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java b/server/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java index 9b064485592a7..ecd767b9d657f 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java +++ b/server/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java @@ -22,10 +22,12 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import org.elasticsearch.test.AbstractQueryTestCase; - import java.util.Random; +import static org.elasticsearch.test.AbstractBuilderTestCase.STRING_ALIAS_FIELD_NAME; +import static org.elasticsearch.test.AbstractBuilderTestCase.STRING_FIELD_NAME; +import static org.elasticsearch.test.ESTestCase.randomFrom; + /** * Utility class for creating random QueryBuilders. 
 * So far only leaf queries like {@link MatchAllQueryBuilder}, {@link TermQueryBuilder} or
@@ -62,9 +64,10 @@ public static MultiTermQueryBuilder createMultiTermQuery(Random r) {
         // for now, only use String Rangequeries for MultiTerm test, numeric and date makes little sense
         // see issue #12123 for discussion
         MultiTermQueryBuilder multiTermQueryBuilder;
+        String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME);
         switch(RandomNumbers.randomIntBetween(r, 0, 3)) {
             case 0:
-                RangeQueryBuilder stringRangeQuery = new RangeQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME);
+                RangeQueryBuilder stringRangeQuery = new RangeQueryBuilder(fieldName);
                 stringRangeQuery.from("a" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
                 stringRangeQuery.to("z" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
                 multiTermQueryBuilder = stringRangeQuery;
@@ -76,7 +79,7 @@ public static MultiTermQueryBuilder createMultiTermQuery(Random r) {
                 multiTermQueryBuilder = new WildcardQueryBuilderTests().createTestQueryBuilder();
                 break;
             case 3:
-                multiTermQueryBuilder = new FuzzyQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME,
+                multiTermQueryBuilder = new FuzzyQueryBuilder(fieldName,
                         RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
                 break;
             default:
diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
index 3668c7dec17a0..ba02ddaf05c6a 100644
--- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
@@ -65,13 +65,15 @@ protected RangeQueryBuilder doCreateTestQueryBuilder() {
         switch (randomIntBetween(0, 2)) {
             case 0:
                 // use mapped integer field for numeric range queries
-                query = new RangeQueryBuilder(randomBoolean() ? INT_FIELD_NAME : INT_RANGE_FIELD_NAME);
+                query = new RangeQueryBuilder(randomFrom(
+                    INT_FIELD_NAME, INT_RANGE_FIELD_NAME, INT_ALIAS_FIELD_NAME));
                 query.from(randomIntBetween(1, 100));
                 query.to(randomIntBetween(101, 200));
                 break;
             case 1:
                 // use mapped date field, using date string representation
-                query = new RangeQueryBuilder(randomBoolean() ? DATE_FIELD_NAME : DATE_RANGE_FIELD_NAME);
+                query = new RangeQueryBuilder(randomFrom(
+                    DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, DATE_ALIAS_FIELD_NAME));
                 query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
                 query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
                 // Create timestamp option only then we have a date mapper,
@@ -87,7 +89,7 @@ protected RangeQueryBuilder doCreateTestQueryBuilder() {
                 break;
             case 2:
             default:
-                query = new RangeQueryBuilder(STRING_FIELD_NAME);
+                query = new RangeQueryBuilder(randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME));
                 query.from("a" + randomAlphaOfLengthBetween(1, 10));
                 query.to("z" + randomAlphaOfLengthBetween(1, 10));
                 break;
@@ -128,17 +130,18 @@ protected Map getAlternateVersions() {
 
     @Override
     protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
+        String expectedFieldName = expectedFieldName(queryBuilder.fieldName());
         if (queryBuilder.from() == null && queryBuilder.to() == null) {
             final Query expectedQuery;
             if (getCurrentTypes().length > 0) {
                 if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)
                         && context.mapperService().fullName(queryBuilder.fieldName()).hasDocValues()) {
-                    expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(queryBuilder.fieldName()));
+                    expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(expectedFieldName));
                 } else if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)
                         && context.mapperService().fullName(queryBuilder.fieldName()).omitNorms() == false) {
-                    expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(queryBuilder.fieldName()));
+                    expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(expectedFieldName));
                 } else {
-                    expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, queryBuilder.fieldName())));
+                    expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, expectedFieldName)));
                 }
             } else {
                 expectedQuery = new MatchNoDocsQuery("no mappings yet");
@@ -146,18 +149,18 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query,
             assertThat(query, equalTo(expectedQuery));
         } else if (getCurrentTypes().length == 0 ||
-            (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false
-                && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false
-                && queryBuilder.fieldName().equals(DATE_RANGE_FIELD_NAME) == false
-                && queryBuilder.fieldName().equals(INT_RANGE_FIELD_NAME) == false)) {
+            (expectedFieldName.equals(DATE_FIELD_NAME) == false
+                && expectedFieldName.equals(INT_FIELD_NAME) == false
+                && expectedFieldName.equals(DATE_RANGE_FIELD_NAME) == false
+                && expectedFieldName.equals(INT_RANGE_FIELD_NAME) == false)) {
             assertThat(query, instanceOf(TermRangeQuery.class));
             TermRangeQuery termRangeQuery = (TermRangeQuery) query;
-            assertThat(termRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
+            assertThat(termRangeQuery.getField(), equalTo(expectedFieldName));
             assertThat(termRangeQuery.getLowerTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.from())));
             assertThat(termRangeQuery.getUpperTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.to())));
             assertThat(termRangeQuery.includesLower(), equalTo(queryBuilder.includeLower()));
             assertThat(termRangeQuery.includesUpper(), equalTo(queryBuilder.includeUpper()));
-        } else if (queryBuilder.fieldName().equals(DATE_FIELD_NAME)) {
+        } else if (expectedFieldName.equals(DATE_FIELD_NAME)) {
             assertThat(query, instanceOf(IndexOrDocValuesQuery.class));
             query = ((IndexOrDocValuesQuery) query).getIndexQuery();
             assertThat(query, instanceOf(PointRangeQuery.class));
@@ -202,7 +205,7 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query,
                 }
             }
             assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, minLong, maxLong), query);
-        } else if (queryBuilder.fieldName().equals(INT_FIELD_NAME)) {
+        } else if (expectedFieldName.equals(INT_FIELD_NAME)) {
             assertThat(query, instanceOf(IndexOrDocValuesQuery.class));
             query = ((IndexOrDocValuesQuery) query).getIndexQuery();
             assertThat(query, instanceOf(PointRangeQuery.class));
@@ -225,7 +228,7 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query,
                     maxInt--;
                 }
             }
-        } else if (queryBuilder.fieldName().equals(DATE_RANGE_FIELD_NAME) || queryBuilder.fieldName().equals(INT_RANGE_FIELD_NAME)) {
+        } else if (expectedFieldName.equals(DATE_RANGE_FIELD_NAME) || expectedFieldName.equals(INT_RANGE_FIELD_NAME)) {
             // todo can't check RangeFieldQuery because its currently package private (this will change)
         } else {
             throw new UnsupportedOperationException();
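The pattern in these query-builder tests is uniform: queries are randomly built against either a concrete field or an alias for it, and assertions compare the resulting Lucene query against the name the alias resolves to. A minimal standalone sketch of the kind of expectedFieldName helper this relies on, assuming the base test case keeps a map from alias to concrete path (the map contents and "mapped_string" values here are illustrative; only the constant names appear in the diff):

import java.util.HashMap;
import java.util.Map;

public class FieldAliasResolution {
    static final String STRING_FIELD_NAME = "mapped_string";
    static final String STRING_ALIAS_FIELD_NAME = "mapped_string_alias";

    // Aliases registered by the test mappings, keyed by alias name.
    private static final Map<String, String> ALIASES = new HashMap<>();
    static {
        ALIASES.put(STRING_ALIAS_FIELD_NAME, STRING_FIELD_NAME);
    }

    // Resolve the field a Lucene query should target: aliases map to their
    // concrete path, every other name (mapped or unmapped) maps to itself.
    static String expectedFieldName(String queryFieldName) {
        return ALIASES.getOrDefault(queryFieldName, queryFieldName);
    }

    public static void main(String[] args) {
        System.out.println(expectedFieldName(STRING_ALIAS_FIELD_NAME)); // mapped_string
        System.out.println(expectedFieldName("unmapped"));              // unmapped
    }
}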
diff --git a/server/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java
index efbb84c3239dd..1110388090a3e 100644
--- a/server/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java
@@ -71,7 +71,7 @@ protected Map getAlternateVersions() {
 
     private static RegexpQueryBuilder randomRegexpQuery() {
         // mapped or unmapped fields
-        String fieldName = randomBoolean() ? STRING_FIELD_NAME : randomAlphaOfLengthBetween(1, 10);
+        String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, randomAlphaOfLengthBetween(1, 10));
         String value = randomAlphaOfLengthBetween(1, 10);
         return new RegexpQueryBuilder(fieldName, value);
     }
@@ -80,7 +80,9 @@ private static RegexpQueryBuilder randomRegexpQuery() {
     protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
         assertThat(query, instanceOf(RegexpQuery.class));
         RegexpQuery regexpQuery = (RegexpQuery) query;
-        assertThat(regexpQuery.getField(), equalTo(queryBuilder.fieldName()));
+
+        String expectedFieldName = expectedFieldName(queryBuilder.fieldName());
+        assertThat(regexpQuery.getField(), equalTo(expectedFieldName));
     }
 
     public void testIllegalArguments() {
diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
index 6be8d26a9d50b..3c508ddc8beb6 100644
--- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
@@ -98,7 +98,8 @@ protected SimpleQueryStringBuilder doCreateTestQueryBuilder() {
         Map fields = new HashMap<>();
         for (int i = 0; i < fieldCount; i++) {
             if (randomBoolean()) {
-                fields.put(STRING_FIELD_NAME, AbstractQueryBuilder.DEFAULT_BOOST);
+                String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME);
+                fields.put(fieldName, AbstractQueryBuilder.DEFAULT_BOOST);
             } else {
                 fields.put(STRING_FIELD_NAME_2, 2.0f / randomIntBetween(1, 20));
             }
diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java
index c93df5b751942..7c459737c771c 100644
--- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java
@@ -63,7 +63,11 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
                 .field("type", "text")
                 .startObject("index_prefixes").endObject()
             .endObject()
-            .endObject().endObject().endObject();
+            .startObject("prefix_field_alias")
+                .field("type", "alias")
+                .field("path", "prefix_field")
+            .endObject()
+            .endObject().endObject().endObject();
 
         mapperService.merge("_doc",
             new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
@@ -168,16 +172,16 @@ public void testUnsupportedInnerQueryType() throws IOException {
     }
 
     public void testToQueryInnerSpanMultiTerm() throws IOException {
-        Query query = new SpanOrQueryBuilder(createTestQueryBuilder()).toQuery(createShardContext());
         //verify that the result is still a span query, despite the boost that might get set (SpanBoostQuery rather than BoostQuery)
         assertThat(query, instanceOf(SpanQuery.class));
     }
 
     public void testToQueryInnerTermQuery() throws IOException {
+        String fieldName = randomFrom("prefix_field", "prefix_field_alias");
         final QueryShardContext context = createShardContext();
         if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
-            Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo"))
+            Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo"))
                 .toQuery(context);
             assertThat(query, instanceOf(FieldMaskingSpanQuery.class));
             FieldMaskingSpanQuery fieldSpanQuery = (FieldMaskingSpanQuery) query;
@@ -186,7 +190,7 @@ public void testToQueryInnerTermQuery() throws IOException {
             SpanTermQuery spanTermQuery = (SpanTermQuery) fieldSpanQuery.getMaskedQuery();
             assertThat(spanTermQuery.getTerm().text(), equalTo("foo"));
 
-            query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo"))
+            query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo"))
                 .boost(2.0f)
                 .toQuery(context);
             assertThat(query, instanceOf(SpanBoostQuery.class));
@@ -199,7 +203,7 @@ public void testToQueryInnerTermQuery() throws IOException {
             spanTermQuery = (SpanTermQuery) fieldSpanQuery.getMaskedQuery();
             assertThat(spanTermQuery.getTerm().text(), equalTo("foo"));
         } else {
-            Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo"))
+            Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo"))
                 .toQuery(context);
             assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class));
             SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query;
@@ -208,7 +212,7 @@ public void testToQueryInnerTermQuery() throws IOException {
             assertThat(prefixQuery.getField(), equalTo("prefix_field"));
             assertThat(prefixQuery.getPrefix().text(), equalTo("foo"));
 
-            query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo"))
+            query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo"))
                 .boost(2.0f)
                 .toQuery(context);
             assertThat(query, instanceOf(SpanBoostQuery.class));
diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java
index d21a18c083977..34dc08d29fb9f 100644
--- a/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java
@@ -38,12 +38,11 @@ public class SpanTermQueryBuilderTests extends AbstractTermQueryTestCase
-        String fieldName = randomValueOtherThanMany(choice -> choice.equals(GEO_POINT_FIELD_NAME) || choice.equals(GEO_SHAPE_FIELD_NAME)
-            || choice.equals(INT_RANGE_FIELD_NAME) || choice.equals(DATE_RANGE_FIELD_NAME), () -> getRandomFieldName());
+        String fieldName = randomValueOtherThanMany(choice ->
+            choice.equals(GEO_POINT_FIELD_NAME) ||
+            choice.equals(GEO_POINT_ALIAS_FIELD_NAME) ||
+            choice.equals(GEO_SHAPE_FIELD_NAME) ||
+            choice.equals(INT_RANGE_FIELD_NAME) ||
+            choice.equals(DATE_RANGE_FIELD_NAME),
+            () -> getRandomFieldName());
         Object[] values = new Object[randomInt(5)];
         for (int i = 0; i < values.length; i++) {
             values[i] = getRandomValueForFieldName(fieldName);
@@ -129,7 +133,8 @@ protected void doAssertLuceneQuery(TermsQueryBuilder queryBuilder, Query query,
             terms = queryBuilder.values();
         }
 
-        TermInSetQuery expected = new TermInSetQuery(queryBuilder.fieldName(),
+        String fieldName = expectedFieldName(queryBuilder.fieldName());
+        TermInSetQuery expected = new TermInSetQuery(fieldName,
             terms.stream().filter(Objects::nonNull).map(Object::toString).map(BytesRef::new).collect(Collectors.toList()));
         assertEquals(expected, query);
     }
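Several of these tests register an alias alongside the concrete field they exercise, as the prefix_field_alias addition above does: a field alias is a mapping entry of type "alias" whose "path" points at the concrete field. A self-contained sketch of building such a mapping with XContentBuilder (the field names mirror the diff; the wrapper class is illustrative):

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class AliasMappingSketch {
    // Builds a _doc mapping with a concrete text field and an alias pointing at it.
    static XContentBuilder aliasMapping() throws java.io.IOException {
        return XContentFactory.jsonBuilder().startObject()
            .startObject("_doc").startObject("properties")
                .startObject("prefix_field")
                    .field("type", "text")
                .endObject()
                .startObject("prefix_field_alias")
                    .field("type", "alias")
                    .field("path", "prefix_field") // queries on the alias resolve to this field
                .endObject()
            .endObject().endObject()
        .endObject();
    }
}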
diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java
index b3cd1a361ecde..cfc423d918ad7 100644
--- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java
@@ -28,12 +28,14 @@
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.NoMergePolicy;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.search.CoveringQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
@@ -83,10 +85,9 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
 
     @Override
     protected TermsSetQueryBuilder doCreateTestQueryBuilder() {
-        String fieldName;
-        do {
-            fieldName = randomFrom(MAPPED_FIELD_NAMES);
-        } while (fieldName.equals(GEO_POINT_FIELD_NAME) || fieldName.equals(GEO_SHAPE_FIELD_NAME));
+        String fieldName = randomValueOtherThanMany(
+            value -> value.equals(GEO_POINT_FIELD_NAME) || value.equals(GEO_SHAPE_FIELD_NAME),
+            () -> randomFrom(MAPPED_FIELD_NAMES));
         List randomTerms = randomValues(fieldName);
         TermsSetQueryBuilder queryBuilder = new TermsSetQueryBuilder(STRING_FIELD_NAME, randomTerms);
         if (randomBoolean()) {
@@ -261,6 +262,22 @@ public void testDoToQuery_msmScriptField() throws Exception {
         }
     }
 
+    public void testFieldAlias() {
+        List randomTerms = Arrays.asList(generateRandomStringArray(5, 10, false, false));
+        TermsSetQueryBuilder queryBuilder = new TermsSetQueryBuilder(STRING_ALIAS_FIELD_NAME, randomTerms)
+            .setMinimumShouldMatchField("m_s_m");
+
+        QueryShardContext context = createShardContext();
+        List termQueries = queryBuilder.createTermQueries(context);
+        assertEquals(randomTerms.size(), termQueries.size());
+
+        String expectedFieldName = expectedFieldName(queryBuilder.getFieldName());
+        for (int i = 0; i < randomTerms.size(); i++) {
+            Term term = new Term(expectedFieldName, randomTerms.get(i));
+            assertThat(termQueries.get(i), equalTo(new TermQuery(term)));
+        }
+    }
+
     private static List randomValues(final String fieldName) {
         final int numValues = randomIntBetween(0, 10);
         final List values = new ArrayList<>(numValues);
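The do/while loops above are folded into randomValueOtherThanMany, a test-framework utility that keeps drawing from a supplier until the rejection predicate stops matching. A minimal standalone sketch of that pattern (the real utility lives in the Elasticsearch test framework; this version is illustrative):

import java.util.function.Predicate;
import java.util.function.Supplier;

public class RandomValueOtherThan {
    // Draw values from `generator` until one is NOT rejected by `rejection`.
    static <T> T randomValueOtherThanMany(Predicate<T> rejection, Supplier<T> generator) {
        T value;
        do {
            value = generator.get();
        } while (rejection.test(value));
        return value;
    }

    public static void main(String[] args) {
        java.util.Random r = new java.util.Random();
        // e.g. pick an even number below 10
        int even = randomValueOtherThanMany(v -> v % 2 != 0, () -> r.nextInt(10));
        System.out.println(even);
    }
}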
diff --git a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java
index 7da423de25b05..48f43eefeb3ca 100644
--- a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java
@@ -60,21 +60,22 @@ protected Map getAlternateVersions() {
     }
 
     private static WildcardQueryBuilder randomWildcardQuery() {
-        // mapped or unmapped field
+        String fieldName = randomFrom(STRING_FIELD_NAME,
+            STRING_ALIAS_FIELD_NAME,
+            randomAlphaOfLengthBetween(1, 10));
         String text = randomAlphaOfLengthBetween(1, 10);
-        if (randomBoolean()) {
-            return new WildcardQueryBuilder(STRING_FIELD_NAME, text);
-        } else {
-            return new WildcardQueryBuilder(randomAlphaOfLengthBetween(1, 10), text);
-        }
+
+        return new WildcardQueryBuilder(fieldName, text);
     }
 
     @Override
     protected void doAssertLuceneQuery(WildcardQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
         assertThat(query, instanceOf(WildcardQuery.class));
         WildcardQuery wildcardQuery = (WildcardQuery) query;
-        assertThat(wildcardQuery.getField(), equalTo(queryBuilder.fieldName()));
-        assertThat(wildcardQuery.getTerm().field(), equalTo(queryBuilder.fieldName()));
+
+        String expectedFieldName = expectedFieldName(queryBuilder.fieldName());
+        assertThat(wildcardQuery.getField(), equalTo(expectedFieldName));
+        assertThat(wildcardQuery.getTerm().field(), equalTo(expectedFieldName));
         assertThat(wildcardQuery.getTerm().text(), equalTo(queryBuilder.value()));
     }
 
@@ -138,19 +139,19 @@ public void testWithMetaDataField() throws IOException {
             assertEquals(expected, query);
         }
     }
-    
+
     public void testIndexWildcard() throws IOException {
         assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
 
         QueryShardContext context = createShardContext();
         String index = context.getFullyQualifiedIndexName();
-        
+
         Query query = new WildcardQueryBuilder("_index", index).doToQuery(context);
         assertThat(query instanceof MatchAllDocsQuery, equalTo(true));
-        
+
         query = new WildcardQueryBuilder("_index", index + "*").doToQuery(context);
         assertThat(query instanceof MatchAllDocsQuery, equalTo(true));
-        
+
         query = new WildcardQueryBuilder("_index", "index_" + index + "*").doToQuery(context);
         assertThat(query instanceof MatchNoDocsQuery, equalTo(true));
     }
diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
index b4cc033ab40c4..a6e7d7d23543f 100644
--- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
+++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
@@ -385,7 +385,7 @@ public void testSeqNoCollision() throws Exception {
             logger.info("--> Promote replica2 as the primary");
             shards.promoteReplicaToPrimary(replica2);
             logger.info("--> Recover replica3 from replica2");
-            recoverReplica(replica3, replica2);
+            recoverReplica(replica3, replica2, true);
             try (Translog.Snapshot snapshot = getTranslog(replica3).newSnapshot()) {
                 assertThat(snapshot.totalOperations(), equalTo(initDocs + 1));
                 final List expectedOps = new ArrayList<>(initOperations);
diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java
index bcde44832fcb8..9d5a53ea0ca8c 100644
--- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java
+++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java
@@ -187,7 +187,6 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception {
         remainingReplica.applyIndexOperationOnReplica(
             remainingReplica.getLocalCheckpoint() + 1,
             1,
-            VersionType.EXTERNAL,
             randomNonNegativeLong(),
             false,
             SourceToParse.source("index", "type", "replica", new BytesArray("{}"), XContentType.JSON));
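Two mechanical refactorings repeat through the shard and recovery tests from here on: recoverReplica and recoveryEmptyReplica gain a trailing boolean, and the replica-side apply*OperationOnReplica methods lose their VersionType argument (presumably because the primary has already resolved versioning before the operation reaches a replica). A hedged sketch of the defaulting-overload shape the first change suggests; the parameter name, default, and the helper's home are assumptions not shown in this diff:

// Hypothetical sketch of a defaulting overload: old two-argument call sites
// keep working, while updated call sites pass the flag explicitly, as the
// diffs above now do.
class RecoverReplicaOverloads {
    void recoverReplica(Object replica, Object primary) throws Exception {
        recoverReplica(replica, primary, true); // assumed default
    }

    void recoverReplica(Object replica, Object primary, boolean markAsStarted) throws Exception {
        // ... peer recovery; optionally move the replica's routing to STARTED ...
    }
}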
diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index d8efe734fe6da..703ba237daa75 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -75,7 +75,6 @@
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.engine.CommitStats;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.EngineException;
@@ -236,6 +235,7 @@ public void testPersistenceStateMetadataPersistence() throws Exception {
     }
 
     public void testFailShard() throws Exception {
+        allowShardFailures();
         IndexShard shard = newStartedShard();
         final ShardPath shardPath = shard.shardPath();
         assertNotNull(shardPath);
@@ -320,7 +320,8 @@ public void testRejectOperationPermitWithHigherTermWhenNotStarted() throws IOExc
     }
 
     public void testPrimaryPromotionDelaysOperations() throws IOException, BrokenBarrierException, InterruptedException {
-        final IndexShard indexShard = newStartedShard(false);
+        final IndexShard indexShard = newShard(false);
+        recoveryEmptyReplica(indexShard, randomBoolean());
 
         final int operations = scaledRandomIntBetween(1, 64);
         final CyclicBarrier barrier = new CyclicBarrier(1 + operations);
@@ -364,20 +365,10 @@ public void onFailure(Exception e) {
         barrier.await();
         latch.await();
 
-        // promote the replica
         final ShardRouting replicaRouting = indexShard.routingEntry();
-        final ShardRouting primaryRouting =
-            newShardRouting(
-                replicaRouting.shardId(),
-                replicaRouting.currentNodeId(),
-                null,
-                true,
-                ShardRoutingState.STARTED,
-                replicaRouting.allocationId());
-        indexShard.updateShardState(primaryRouting, indexShard.getPrimaryTerm() + 1, (shard, listener) -> {},
-            0L, Collections.singleton(primaryRouting.allocationId().getId()),
-            new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(),
-            Collections.emptySet());
+        promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()),
+            new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build());
+
         final int delayedOperations = scaledRandomIntBetween(1, 64);
         final CyclicBarrier delayedOperationsBarrier = new CyclicBarrier(1 + delayedOperations);
@@ -439,8 +430,9 @@ public void onFailure(Exception e) {
      * 1) Internal state (ala ReplicationTracker) have been updated
      * 2) Primary term is set to the new term
      */
-    public void testPublishingOrderOnPromotion() throws IOException, BrokenBarrierException, InterruptedException {
-        final IndexShard indexShard = newStartedShard(false);
+    public void testPublishingOrderOnPromotion() throws IOException, InterruptedException, BrokenBarrierException {
+        final IndexShard indexShard = newShard(false);
+        recoveryEmptyReplica(indexShard, randomBoolean());
         final long promotedTerm = indexShard.getPrimaryTerm() + 1;
         final CyclicBarrier barrier = new CyclicBarrier(2);
         final AtomicBoolean stop = new AtomicBoolean();
@@ -459,18 +451,10 @@ public void testPublishingOrderOnPromotion() throws IOException, BrokenBarrierEx
         });
         thread.start();
 
-        final ShardRouting replicaRouting = indexShard.routingEntry();
-        final ShardRouting primaryRouting = newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), null, true,
-            ShardRoutingState.STARTED, replicaRouting.allocationId());
-
-
-        final Set inSyncAllocationIds = Collections.singleton(primaryRouting.allocationId().getId());
-        final IndexShardRoutingTable routingTable =
-            new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build();
         barrier.await();
-        // promote the replica
-        indexShard.updateShardState(primaryRouting, promotedTerm, (shard, listener) -> {}, 0L, inSyncAllocationIds, routingTable,
-            Collections.emptySet());
+        final ShardRouting replicaRouting = indexShard.routingEntry();
+        promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()),
+            new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build());
 
         stop.set(true);
         thread.join();
@@ -479,7 +463,8 @@
 
     public void testPrimaryFillsSeqNoGapsOnPromotion() throws Exception {
-        final IndexShard indexShard = newStartedShard(false);
+        final IndexShard indexShard = newShard(false);
+        recoveryEmptyReplica(indexShard, randomBoolean());
 
         // most of the time this is large enough that most of the time there will be at least one gap
         final int operations = 1024 - scaledRandomIntBetween(0, 1024);
@@ -490,17 +475,8 @@
 
         // promote the replica
         final ShardRouting replicaRouting = indexShard.routingEntry();
-        final ShardRouting primaryRouting =
-            newShardRouting(
-                replicaRouting.shardId(),
-                replicaRouting.currentNodeId(),
-                null,
-                true,
-                ShardRoutingState.STARTED,
-                replicaRouting.allocationId());
-        indexShard.updateShardState(primaryRouting, indexShard.getPrimaryTerm() + 1, (shard, listener) -> {},
-            0L, Collections.singleton(primaryRouting.allocationId().getId()),
-            new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(), Collections.emptySet());
+        promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()),
+            new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build());
 
         /*
          * This operation completing means that the delay operation executed as part of increasing the primary term has completed and the
@@ -517,7 +493,7 @@ public void onResponse(Releasable releasable) {
 
                 @Override
                 public void onFailure(Exception e) {
-                    throw new RuntimeException(e);
+                    throw new AssertionError(e);
                 }
             },
             ThreadPool.Names.GENERIC, "");
@@ -857,7 +833,7 @@ public void testGlobalCheckpointSync() throws IOException {
         // add a replica
         recoverShardFromStore(primaryShard);
         final IndexShard replicaShard = newShard(shardId, false);
-        recoverReplica(replicaShard, primaryShard);
+        recoverReplica(replicaShard, primaryShard, true);
         final int maxSeqNo = randomIntBetween(0, 128);
         for (int i = 0; i <= maxSeqNo; i++) {
             EngineTestCase.generateNewSeqNo(primaryShard.getEngine());
         }
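The three promotion tests above now share a promoteReplica helper instead of hand-building a primary ShardRouting and calling updateShardState inline. A condensed sketch of what such a helper does, reconstructed from the inline code it replaces (the method body below is the removed code verbatim in spirit; the helper's exact signature and location, presumably IndexShardTestCase, are assumptions):

// Sketch: promote a replica to primary by rebuilding its routing entry with
// primary=true and bumping the primary term (imports omitted; all types are
// the Elasticsearch ones used in the removed lines above).
void promoteReplica(IndexShard indexShard, Set<String> inSyncAllocationIds,
                    IndexShardRoutingTable routingTable) throws IOException {
    final ShardRouting replicaRouting = indexShard.routingEntry();
    final ShardRouting primaryRouting = newShardRouting(
        replicaRouting.shardId(),
        replicaRouting.currentNodeId(),
        null,
        true,                       // promote to primary
        ShardRoutingState.STARTED,
        replicaRouting.allocationId());
    indexShard.updateShardState(primaryRouting, indexShard.getPrimaryTerm() + 1,
        (shard, listener) -> {}, 0L, inSyncAllocationIds, routingTable,
        Collections.emptySet());
}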
@@ -1555,17 +1531,17 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException {
      * - If flush and then recover from the existing store, delete #1 will be removed while index #0 is still retained and replayed.
      */
         final IndexShard shard = newStartedShard(false);
-        shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id", VersionType.EXTERNAL);
+        shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id");
         shard.getEngine().rollTranslogGeneration(); // isolate the delete in it's own generation
-        shard.applyIndexOperationOnReplica(0, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
+        shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
             SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON));
-        shard.applyIndexOperationOnReplica(3, 3, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
+        shard.applyIndexOperationOnReplica(3, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
             SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON));
         // Flushing a new commit with local checkpoint=1 allows to skip the translog gen #1 in recovery.
         shard.flush(new FlushRequest().force(true).waitIfOngoing(true));
-        shard.applyIndexOperationOnReplica(2, 3, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
+        shard.applyIndexOperationOnReplica(2, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
             SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON));
-        shard.applyIndexOperationOnReplica(5, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
+        shard.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
             SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON));
 
         final int translogOps;
@@ -1636,7 +1612,7 @@ public void testPrimaryHandOffUpdatesLocalCheckpoint() throws IOException {
         IndexShardTestCase.updateRoutingEntry(primarySource, primarySource.routingEntry().relocate(randomAlphaOfLength(10), -1));
         final IndexShard primaryTarget = newShard(primarySource.routingEntry().getTargetRelocatingShard());
         updateMappings(primaryTarget, primarySource.indexSettings().getIndexMetaData());
-        recoverReplica(primaryTarget, primarySource);
+        recoverReplica(primaryTarget, primarySource, true);
 
         // check that local checkpoint of new primary is properly tracked after primary relocation
         assertThat(primaryTarget.getLocalCheckpoint(), equalTo(totalOps - 1L));
@@ -1656,8 +1632,7 @@ public void testRecoverFromStoreWithNoOps() throws IOException {
         updateMappings(otherShard, shard.indexSettings().getIndexMetaData());
         SourceToParse sourceToParse = SourceToParse.source(shard.shardId().getIndexName(), "_doc", "1",
             new BytesArray("{}"), XContentType.JSON);
-        otherShard.applyIndexOperationOnReplica(1, 1,
-            VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse);
+        otherShard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse);
 
         final ShardRouting primaryShardRouting = shard.routingEntry();
         IndexShard newShard = reinitShard(otherShard, ShardRoutingHelper.initWithSameId(primaryShardRouting,
@@ -1773,18 +1748,18 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception {
         final IndexShard shard = newStartedShard(false);
         final String indexName = shard.shardId().getIndexName();
         // Index #0, index #1
-        shard.applyIndexOperationOnReplica(0, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
+        shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
            SourceToParse.source(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON));
         flushShard(shard);
         shard.updateGlobalCheckpointOnReplica(0, "test"); // stick the global checkpoint here.
-        shard.applyIndexOperationOnReplica(1, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
+        shard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
             SourceToParse.source(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON));
         flushShard(shard);
         assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1"));
         // Simulate resync (without rollback): Noop #1, index #2
         acquireReplicaOperationPermitBlockingly(shard, shard.primaryTerm + 1);
         shard.markSeqNoAsNoop(1, "test");
-        shard.applyIndexOperationOnReplica(2, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
+        shard.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
             SourceToParse.source(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON));
         flushShard(shard);
         assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1", "doc-2"));
@@ -2094,7 +2069,7 @@ public long indexTranslogOperations(List operations, int tot
                 assertFalse(replica.isSyncNeeded());
                 return localCheckpoint;
             }
-        }, true);
+        }, true, true);
 
         closeShards(primary, replica);
     }
@@ -2114,11 +2089,11 @@ public void testRecoverFromTranslog() throws IOException {
         int numCorruptEntries = 0;
         for (int i = 0; i < numTotalEntries; i++) {
             if (randomBoolean()) {
-                operations.add(new Translog.Index("_doc", "1", 0, primary.getPrimaryTerm(), 1, VersionType.INTERNAL,
+                operations.add(new Translog.Index("_doc", "1", 0, primary.getPrimaryTerm(), 1,
                     "{\"foo\" : \"bar\"}".getBytes(Charset.forName("UTF-8")), null, -1));
             } else {
                 // corrupt entry
-                operations.add(new Translog.Index("_doc", "2", 1, primary.getPrimaryTerm(), 1, VersionType.INTERNAL,
+                operations.add(new Translog.Index("_doc", "2", 1, primary.getPrimaryTerm(), 1,
                     "{\"foo\" : \"bar}".getBytes(Charset.forName("UTF-8")), null, -1));
                 numCorruptEntries++;
             }
         }
@@ -2201,7 +2176,7 @@ public long indexTranslogOperations(List operations, int tot
                 assertTrue(replica.isActive());
                 return localCheckpoint;
             }
-        }, false);
+        }, false, true);
 
         closeShards(primary, replica);
     }
@@ -2253,7 +2228,7 @@ public void finalizeRecovery(long globalCheckpoint) throws IOException {
                 super.finalizeRecovery(globalCheckpoint);
                 assertListenerCalled.accept(replica);
             }
-        }, false);
+        }, false, true);
 
         closeShards(primary, replica);
     }
@@ -2630,8 +2605,7 @@ private Result indexOnReplicaWithGaps(
                 final String id = Integer.toString(i);
                 SourceToParse sourceToParse = SourceToParse.source(indexShard.shardId().getIndexName(), "_doc", id,
                     new BytesArray("{}"), XContentType.JSON);
-                indexShard.applyIndexOperationOnReplica(i,
-                    1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse);
+                indexShard.applyIndexOperationOnReplica(i, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse);
                 if (!gap && i == localCheckpoint + 1) {
                     localCheckpoint++;
                }
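testRecoverFromTranslog above builds its translog entries directly, with the version type argument now gone from the Translog.Index constructor; the valid and deliberately corrupt payloads differ only in a missing closing quote. A runnable illustration of the two payloads (pure JDK; the class name is illustrative):

import java.nio.charset.StandardCharsets;

public class TranslogPayloads {
    public static void main(String[] args) {
        // Well-formed JSON source, as used for the recoverable entry.
        byte[] valid = "{\"foo\" : \"bar\"}".getBytes(StandardCharsets.UTF_8);
        // Missing closing quote: parsing this source must fail during recovery.
        byte[] corrupt = "{\"foo\" : \"bar}".getBytes(StandardCharsets.UTF_8);
        System.out.println(valid.length + " vs " + corrupt.length); // 15 vs 14
    }
}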
diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java
index b290f4d45597b..4444f475329b3 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java
@@ -123,12 +123,10 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception {
     public void testSyncerOnClosingShard() throws Exception {
         IndexShard shard = newStartedShard(true);
         AtomicBoolean syncActionCalled = new AtomicBoolean();
-        CountDownLatch syncCalledLatch = new CountDownLatch(1);
         PrimaryReplicaSyncer.SyncAction syncAction =
             (request, parentTask, allocationId, primaryTerm, listener) -> {
                 logger.info("Sending off {} operations", request.getOperations().length);
                 syncActionCalled.set(true);
-                syncCalledLatch.countDown();
                 threadPool.generic().execute(() -> listener.onResponse(new ResyncReplicationResponse()));
             };
         PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(Settings.EMPTY,
@@ -147,13 +145,27 @@ public void testSyncerOnClosingShard() throws Exception {
         shard.updateShardState(shard.routingEntry(), shard.getPrimaryTerm(), null, 1000L, Collections.singleton(allocationId),
             new IndexShardRoutingTable.Builder(shard.shardId()).addShard(shard.routingEntry()).build(), Collections.emptySet());
 
-        PlainActionFuture fut = new PlainActionFuture<>();
-        threadPool.generic().execute(() -> {
-            try {
-                syncer.resync(shard, fut);
-            } catch (AlreadyClosedException ace) {
-                fut.onFailure(ace);
+        CountDownLatch syncCalledLatch = new CountDownLatch(1);
+        PlainActionFuture fut = new PlainActionFuture() {
+            @Override
+            public void onFailure(Exception e) {
+                try {
+                    super.onFailure(e);
+                } finally {
+                    syncCalledLatch.countDown();
+                }
+            }
+            @Override
+            public void onResponse(PrimaryReplicaSyncer.ResyncTask result) {
+                try {
+                    super.onResponse(result);
+                } finally {
+                    syncCalledLatch.countDown();
+                }
             }
+        };
+        threadPool.generic().execute(() -> {
+            syncer.resync(shard, fut);
         });
         if (randomBoolean()) {
             syncCalledLatch.await();
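The refactor above replaces an ad-hoc try/catch around resync with a future whose completion, success or failure, releases a latch. The underlying pattern is easy to state in plain JDK terms (a standalone sketch; PlainActionFuture itself is an Elasticsearch class not shown in this diff):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;

public class LatchOnCompletion {
    public static void main(String[] args) throws InterruptedException {
        CountDownLatch completed = new CountDownLatch(1);
        CompletableFuture<String> fut = new CompletableFuture<>();
        // Count down once the future finishes, whether it succeeded or failed,
        // mirroring the overridden onResponse/onFailure pair in the test.
        fut.whenComplete((result, failure) -> completed.countDown());

        new Thread(() -> fut.complete("resync done")).start();
        completed.await();
        System.out.println(fut.join());
    }
}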
"classic") .put("index.similarity.my_similarity.discount_overlaps", false) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_3_0) // otherwise classic is forbidden .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(ClassicSimilarity.class)); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(ClassicSimilarity.class)); - ClassicSimilarity similarity = (ClassicSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + ClassicSimilarity similarity = (ClassicSimilarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getDiscountOverlaps(), equalTo(false)); } @@ -117,11 +117,11 @@ public void testResolveSimilaritiesFromMapping_classicIsForbidden() throws IOExc } public void testResolveSimilaritiesFromMapping_bm25() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "BM25") @@ -129,37 +129,32 @@ public void testResolveSimilaritiesFromMapping_bm25() throws IOException { .put("index.similarity.my_similarity.b", 0.5f) .put("index.similarity.my_similarity.discount_overlaps", false) .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(BM25Similarity.class)); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(BM25Similarity.class)); - BM25Similarity similarity = (BM25Similarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + BM25Similarity similarity = (BM25Similarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getK1(), equalTo(2.0f)); assertThat(similarity.getB(), equalTo(0.5f)); assertThat(similarity.getDiscountOverlaps(), equalTo(false)); } public void testResolveSimilaritiesFromMapping_boolean() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "boolean").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); - IndexService indexService = createIndex("foo", Settings.EMPTY); - DocumentMapper documentMapper = indexService.mapperService() - .documentMapperParser() - .parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), - 
instanceOf(BooleanSimilarity.class)); + MapperService mapperService = createIndex("foo", Settings.EMPTY, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(BooleanSimilarity.class)); } public void testResolveSimilaritiesFromMapping_DFR() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "DFR") @@ -168,11 +163,10 @@ public void testResolveSimilaritiesFromMapping_DFR() throws IOException { .put("index.similarity.my_similarity.normalization", "h2") .put("index.similarity.my_similarity.normalization.h2.c", 3f) .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(DFRSimilarity.class)); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(DFRSimilarity.class)); - DFRSimilarity similarity = (DFRSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + DFRSimilarity similarity = (DFRSimilarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getBasicModel(), instanceOf(BasicModelG.class)); assertThat(similarity.getAfterEffect(), instanceOf(AfterEffectL.class)); assertThat(similarity.getNormalization(), instanceOf(NormalizationH2.class)); @@ -180,11 +174,11 @@ public void testResolveSimilaritiesFromMapping_DFR() throws IOException { } public void testResolveSimilaritiesFromMapping_IB() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "IB") @@ -193,11 +187,10 @@ public void testResolveSimilaritiesFromMapping_IB() throws IOException { .put("index.similarity.my_similarity.normalization", "h2") .put("index.similarity.my_similarity.normalization.h2.c", 3f) .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(IBSimilarity.class)); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(IBSimilarity.class)); - IBSimilarity similarity = (IBSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + IBSimilarity similarity = (IBSimilarity) 
mapperService.fullName("field1").similarity().get(); assertThat(similarity.getDistribution(), instanceOf(DistributionSPL.class)); assertThat(similarity.getLambda(), instanceOf(LambdaTTF.class)); assertThat(similarity.getNormalization(), instanceOf(NormalizationH2.class)); @@ -205,59 +198,58 @@ public void testResolveSimilaritiesFromMapping_IB() throws IOException { } public void testResolveSimilaritiesFromMapping_DFI() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "DFI") .put("index.similarity.my_similarity.independence_measure", "chisquared") .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - MappedFieldType fieldType = documentMapper.mappers().getMapper("field1").fieldType(); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + MappedFieldType fieldType = mapperService.fullName("field1"); + assertThat(fieldType.similarity().get(), instanceOf(DFISimilarity.class)); DFISimilarity similarity = (DFISimilarity) fieldType.similarity().get(); assertThat(similarity.getIndependence(), instanceOf(IndependenceChiSquared.class)); } public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "LMDirichlet") .put("index.similarity.my_similarity.mu", 3000f) .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(LMDirichletSimilarity.class)); - LMDirichletSimilarity similarity = (LMDirichletSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(LMDirichletSimilarity.class)); + + LMDirichletSimilarity similarity = (LMDirichletSimilarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getMu(), equalTo(3000f)); } public void testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", 
"my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "LMJelinekMercer") .put("index.similarity.my_similarity.lambda", 0.7f) .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(LMJelinekMercerSimilarity.class)); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(LMJelinekMercerSimilarity.class)); - LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getLambda(), equalTo(0.7f)); } diff --git a/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java b/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java index e5adbde71eb31..034e7daaf7f99 100644 --- a/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java @@ -109,12 +109,62 @@ public void testDocFreqs() throws IOException { IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); assertThat(shard, notNullValue()); - TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); + TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); + assertEquals(1, response.getFields().size()); Terms terms = response.getFields().terms("text"); TermsEnum iterator = terms.iterator(); while (iterator.next() != null) { assertEquals(max, iterator.docFreq()); } - } + } + + public void testWithIndexedPhrases() throws IOException { + XContentBuilder mapping = jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("index_phrases", true) + .field("term_vector", "with_positions_offsets_payloads") + .endObject() + .endObject() + .endObject() + .endObject(); + Settings settings = Settings.builder() + .put("number_of_shards", 1) + .build(); + createIndex("test", settings, "_doc", mapping); + ensureGreen(); + + int max = between(3, 10); + BulkRequestBuilder bulk = client().prepareBulk(); + for (int i = 0; i < max; i++) { + bulk.add(client().prepareIndex("test", "_doc", Integer.toString(i)) + .setSource("text", "the quick brown fox jumped over the lazy dog")); + } + bulk.get(); + + TermVectorsRequest request = new TermVectorsRequest("test", "_doc", "0").termStatistics(true); + + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService test = indicesService.indexService(resolveIndex("test")); + IndexShard shard = test.getShardOrNull(0); + assertThat(shard, notNullValue()); + TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); + assertEquals(2, response.getFields().size()); + + Terms terms = response.getFields().terms("text"); + TermsEnum iterator = terms.iterator(); + while (iterator.next() != null) { + 
diff --git a/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java b/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java
index e5adbde71eb31..034e7daaf7f99 100644
--- a/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java
@@ -109,12 +109,62 @@ public void testDocFreqs() throws IOException {
         IndexService test = indicesService.indexService(resolveIndex("test"));
         IndexShard shard = test.getShardOrNull(0);
         assertThat(shard, notNullValue());
-        TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request);
+        TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request);
+        assertEquals(1, response.getFields().size());
 
         Terms terms = response.getFields().terms("text");
         TermsEnum iterator = terms.iterator();
         while (iterator.next() != null) {
             assertEquals(max, iterator.docFreq());
         }
-    }
+    }
+
+    public void testWithIndexedPhrases() throws IOException {
+        XContentBuilder mapping = jsonBuilder()
+            .startObject()
+                .startObject("_doc")
+                    .startObject("properties")
+                        .startObject("text")
+                            .field("type", "text")
+                            .field("index_phrases", true)
+                            .field("term_vector", "with_positions_offsets_payloads")
+                        .endObject()
+                    .endObject()
+                .endObject()
+            .endObject();
+        Settings settings = Settings.builder()
+            .put("number_of_shards", 1)
+            .build();
+        createIndex("test", settings, "_doc", mapping);
+        ensureGreen();
+
+        int max = between(3, 10);
+        BulkRequestBuilder bulk = client().prepareBulk();
+        for (int i = 0; i < max; i++) {
+            bulk.add(client().prepareIndex("test", "_doc", Integer.toString(i))
+                .setSource("text", "the quick brown fox jumped over the lazy dog"));
+        }
+        bulk.get();
+
+        TermVectorsRequest request = new TermVectorsRequest("test", "_doc", "0").termStatistics(true);
+
+        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+        IndexService test = indicesService.indexService(resolveIndex("test"));
+        IndexShard shard = test.getShardOrNull(0);
+        assertThat(shard, notNullValue());
+        TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request);
+        assertEquals(2, response.getFields().size());
+
+        Terms terms = response.getFields().terms("text");
+        TermsEnum iterator = terms.iterator();
+        while (iterator.next() != null) {
+            assertEquals(max, iterator.docFreq());
+        }
+
+        Terms phrases = response.getFields().terms("text._index_phrase");
+        TermsEnum phraseIterator = phrases.iterator();
+        while (phraseIterator.next() != null) {
+            assertEquals(max, phraseIterator.docFreq());
+        }
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
index cf6e753684676..b255238c8648c 100644
--- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
+++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
@@ -416,9 +416,9 @@ public void testStats() throws IOException {
         {
             final TranslogStats stats = stats();
             assertThat(stats.estimatedNumberOfOperations(), equalTo(1));
-            assertThat(stats.getTranslogSizeInBytes(), equalTo(163L));
+            assertThat(stats.getTranslogSizeInBytes(), equalTo(162L));
             assertThat(stats.getUncommittedOperations(), equalTo(1));
-            assertThat(stats.getUncommittedSizeInBytes(), equalTo(163L));
+            assertThat(stats.getUncommittedSizeInBytes(), equalTo(162L));
             assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L));
         }
 
@@ -426,9 +426,9 @@ public void testStats() throws IOException {
         {
             final TranslogStats stats = stats();
             assertThat(stats.estimatedNumberOfOperations(), equalTo(2));
-            assertThat(stats.getTranslogSizeInBytes(), equalTo(212L));
+            assertThat(stats.getTranslogSizeInBytes(), equalTo(210L));
             assertThat(stats.getUncommittedOperations(), equalTo(2));
-            assertThat(stats.getUncommittedSizeInBytes(), equalTo(212L));
+            assertThat(stats.getUncommittedSizeInBytes(), equalTo(210L));
             assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L));
         }
 
@@ -436,9 +436,9 @@ public void testStats() throws IOException {
         {
             final TranslogStats stats = stats();
             assertThat(stats.estimatedNumberOfOperations(), equalTo(3));
-            assertThat(stats.getTranslogSizeInBytes(), equalTo(261L));
+            assertThat(stats.getTranslogSizeInBytes(), equalTo(258L));
             assertThat(stats.getUncommittedOperations(), equalTo(3));
-            assertThat(stats.getUncommittedSizeInBytes(), equalTo(261L));
+            assertThat(stats.getUncommittedSizeInBytes(), equalTo(258L));
             assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L));
         }
 
@@ -446,13 +446,13 @@ public void testStats() throws IOException {
         {
             final TranslogStats stats = stats();
             assertThat(stats.estimatedNumberOfOperations(), equalTo(4));
-            assertThat(stats.getTranslogSizeInBytes(), equalTo(303L));
+            assertThat(stats.getTranslogSizeInBytes(), equalTo(300L));
             assertThat(stats.getUncommittedOperations(), equalTo(4));
-            assertThat(stats.getUncommittedSizeInBytes(), equalTo(303L));
+            assertThat(stats.getUncommittedSizeInBytes(), equalTo(300L));
             assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L));
         }
 
-        final long expectedSizeInBytes = 358L;
+        final long expectedSizeInBytes = 355L;
         translog.rollGeneration();
         {
             final TranslogStats stats = stats();
@@ -725,14 +725,12 @@ public void testConcurrentWritesWithVaryingSize() throws Throwable {
                     assertEquals(expIndexOp.type(), indexOp.type());
                     assertEquals(expIndexOp.source(), indexOp.source());
                     assertEquals(expIndexOp.version(), indexOp.version());
-                    assertEquals(expIndexOp.versionType(), indexOp.versionType());
                     break;
                 case DELETE:
                     Translog.Delete delOp = (Translog.Delete) op;
                     Translog.Delete expDelOp = (Translog.Delete) expectedOp;
                     assertEquals(expDelOp.uid(), delOp.uid());
                     assertEquals(expDelOp.version(), delOp.version());
-                    assertEquals(expDelOp.versionType(), delOp.versionType());
                     break;
                 case NO_OP:
                     final Translog.NoOp noOp = (Translog.NoOp) op;
@@ -1478,7 +1476,7 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException {
         try (Translog ignored = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get)) {
             fail("corrupted");
         } catch (IllegalStateException ex) {
-            assertEquals("Checkpoint file translog-3.ckp already exists but has corrupted content expected: Checkpoint{offset=3080, " +
+            assertEquals("Checkpoint file translog-3.ckp already exists but has corrupted content expected: Checkpoint{offset=3025, " +
                 "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1, trimmedAboveSeqNo=-2} but got: Checkpoint{offset=0, numOps=0, " +
                 "generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, minTranslogGeneration=0, trimmedAboveSeqNo=-2}", ex.getMessage());
         }
@@ -1842,8 +1840,7 @@ public void run() {
                             new Term("_uid", threadId + "_" + opCount),
                             seqNoGenerator.getAndIncrement(),
                             primaryTerm.get(),
-                            1 + randomInt(100000),
-                            randomFrom(VersionType.values()));
+                            1 + randomInt(100000));
                         break;
                     case NO_OP:
                         op = new Translog.NoOp(seqNoGenerator.getAndIncrement(), primaryTerm.get(), randomAlphaOfLength(16));
@@ -2932,6 +2929,24 @@ public void testSnapshotDedupOperations() throws Exception {
         }
     }
 
+    /** Make sure that it's ok to close a translog snapshot multiple times */
+    public void testCloseSnapshotTwice() throws Exception {
+        int numOps = between(0, 10);
+        for (int i = 0; i < numOps; i++) {
+            Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), i, primaryTerm.get(), new byte[]{1});
+            translog.add(op);
+            if (randomBoolean()) {
+                translog.rollGeneration();
+            }
+        }
+        for (int i = 0; i < 5; i++) {
+            Translog.Snapshot snapshot = translog.newSnapshot();
+            assertThat(snapshot, SnapshotMatchers.size(numOps));
+            snapshot.close();
+            snapshot.close();
+        }
+    }
+
     static class SortedSnapshot implements Translog.Snapshot {
         private final Translog.Snapshot snapshot;
         private List operations = null;
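testCloseSnapshotTwice pins down that closing a translog snapshot is idempotent. The usual way to get that property is a close() guarded by an atomic flag, sketched here in plain Java (illustrative; the actual Translog.Snapshot implementation is not shown in this diff):

import java.util.concurrent.atomic.AtomicBoolean;

public class IdempotentClose implements AutoCloseable {
    private final AtomicBoolean closed = new AtomicBoolean();

    @Override
    public void close() {
        // Only the first call releases resources; later calls are no-ops.
        if (closed.compareAndSet(false, true)) {
            System.out.println("released");
        }
    }

    public static void main(String[] args) {
        IdempotentClose snapshot = new IdempotentClose();
        snapshot.close();
        snapshot.close(); // safe: second close must not throw or double-release
    }
}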
current: " + this.shardRouting + " new: " + shardRouting, + shardRouting.active()); + } this.shardRouting = shardRouting; if (shardRouting.primary()) { term = newPrimaryTerm; diff --git a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java index 2ba943ba0dc4b..788777ade7dab 100644 --- a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.mapping; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -68,10 +69,26 @@ public void testGetMappingsWhereThereAreNone() { } private XContentBuilder getMappingForType(String type) throws IOException { - return jsonBuilder().startObject().startObject(type).startObject("properties") - .startObject("field1").field("type", "text").endObject() - .startObject("obj").startObject("properties").startObject("subfield").field("type", "keyword").endObject().endObject().endObject() - .endObject().endObject().endObject(); + return jsonBuilder().startObject() + .startObject(type) + .startObject("properties") + .startObject("field1") + .field("type", "text") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "field1") + .endObject() + .startObject("obj") + .startObject("properties") + .startObject("subfield") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); } public void testGetFieldMappings() throws Exception { @@ -138,8 +155,23 @@ public void testSimpleGetFieldMappingsWithDefaults() throws Exception { assertThat((Map) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("index", Boolean.TRUE)); assertThat((Map) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("type", (Object) "text")); assertThat((Map) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"), hasEntry("type", (Object) "keyword")); + } + + @SuppressWarnings("unchecked") + public void testGetFieldMappingsWithFieldAlias() throws Exception { + assertAcked(prepareCreate("test").addMapping("type", getMappingForType("type"))); + + GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings() + .setFields("alias", "field1").get(); + FieldMappingMetaData aliasMapping = response.fieldMappings("test", "type", "alias"); + assertThat(aliasMapping.fullName(), equalTo("alias")); + assertThat(aliasMapping.sourceAsMap(), hasKey("alias")); + assertThat((Map) aliasMapping.sourceAsMap().get("alias"), hasEntry("type", "alias")); + FieldMappingMetaData field1Mapping = response.fieldMappings("test", "type", "field1"); + assertThat(field1Mapping.fullName(), equalTo("field1")); + assertThat(field1Mapping.sourceAsMap(), hasKey("field1")); } //fix #6552 diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java index 7b1003a862481..51698d443925b 100644 --- 
a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java @@ -44,7 +44,7 @@ public void testGetStartingSeqNo() throws Exception { try { // Empty store { - recoveryEmptyReplica(replica); + recoveryEmptyReplica(replica, true); final RecoveryTarget recoveryTarget = new RecoveryTarget(replica, null, null, null); assertThat(PeerRecoveryTargetService.getStartingSeqNo(logger, recoveryTarget), equalTo(0L)); recoveryTarget.decRef(); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index 6ece124daeb79..dae5154776523 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -123,22 +123,22 @@ public void testRecoveryWithOutOfOrderDelete() throws Exception { final String indexName = orgReplica.shardId().getIndexName(); // delete #1 - orgReplica.applyDeleteOperationOnReplica(1, 2, "type", "id", VersionType.EXTERNAL); + orgReplica.applyDeleteOperationOnReplica(1, 2, "type", "id"); orgReplica.flush(new FlushRequest().force(true)); // isolate delete#1 in its own translog generation and lucene segment // index #0 - orgReplica.applyIndexOperationOnReplica(0, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON)); // index #3 - orgReplica.applyIndexOperationOnReplica(3, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(3, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON)); // Flushing a new commit with local checkpoint=1 allows to delete the translog gen #1. orgReplica.flush(new FlushRequest().force(true).waitIfOngoing(true)); // index #2 - orgReplica.applyIndexOperationOnReplica(2, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON)); orgReplica.updateGlobalCheckpointOnReplica(3L, "test"); // index #5 -> force NoOp #4. 
- orgReplica.applyIndexOperationOnReplica(5, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON)); final int translogOps; @@ -263,7 +263,7 @@ public void testPeerRecoverySendSafeCommitInFileBased() throws Exception { } IndexShard replicaShard = newShard(primaryShard.shardId(), false); updateMappings(replicaShard, primaryShard.indexSettings().getIndexMetaData()); - recoverReplica(replicaShard, primaryShard); + recoverReplica(replicaShard, primaryShard, true); List commits = DirectoryReader.listCommits(replicaShard.store().directory()); long maxSeqNo = Long.parseLong(commits.get(0).getUserData().get(SequenceNumbers.MAX_SEQ_NO)); assertThat(maxSeqNo, lessThanOrEqualTo(globalCheckpoint)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index c2a2405098dab..edc29b0d2c574 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -116,6 +116,21 @@ public void setupSuiteScopeCluster() throws Exception { .field(SINGLE_VALUED_FIELD_NAME, i * 2 - 1) .endObject())); } + + // Create two indices and add the field 'route_length_miles' as an alias in + // one, and a concrete field in the other. + prepareCreate("old_index") + .addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance") + .get(); + prepareCreate("new_index") + .addMapping("_doc", "route_length_miles", "type=double") + .get(); + + builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 50.5)); + builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); + builders.add(client().prepareIndex("new_index", "_doc").setSource(Collections.emptyMap())); + indexRandom(true, builders); ensureSearchable(); } @@ -972,4 +987,72 @@ public void testDontCacheScripts() throws Exception { assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getMissCount(), equalTo(1L)); } + + public void testFieldAlias() { + SearchResponse response = client().prepareSearch("old_index", "new_index") + .addAggregation(range("range") + .field("route_length_miles") + .addUnboundedTo(50.0) + .addRange(50.0, 150.0) + .addUnboundedFrom(150.0)) + .execute().actionGet(); + + assertSearchResponse(response); + + Range range = response.getAggregations().get("range"); + assertThat(range, notNullValue()); + assertThat(range.getName(), equalTo("range")); + List buckets = range.getBuckets(); + assertThat(buckets.size(), equalTo(3)); + + Range.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("*-50.0")); + assertThat(bucket.getDocCount(), equalTo(1L)); + + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("50.0-150.0")); + assertThat(bucket.getDocCount(), equalTo(2L)); + + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("150.0-*")); + assertThat(bucket.getDocCount(), equalTo(0L)); + } + + + public void 
testFieldAliasWithMissingValue() { + SearchResponse response = client().prepareSearch("old_index", "new_index") + .addAggregation(range("range") + .field("route_length_miles") + .missing(0.0) + .addUnboundedTo(50.0) + .addRange(50.0, 150.0) + .addUnboundedFrom(150.0)) + .execute().actionGet(); + + assertSearchResponse(response); + + Range range = response.getAggregations().get("range"); + assertThat(range, notNullValue()); + assertThat(range.getName(), equalTo("range")); + List buckets = range.getBuckets(); + assertThat(buckets.size(), equalTo(3)); + + Range.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("*-50.0")); + assertThat(bucket.getDocCount(), equalTo(2L)); + + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("50.0-150.0")); + assertThat(bucket.getDocCount(), equalTo(2L)); + + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("150.0-*")); + assertThat(bucket.getDocCount(), equalTo(0L)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java index aaf366c7c7b06..4a69f9d537934 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java @@ -66,6 +66,10 @@ public void setupSuiteScopeCluster() throws Exception { "type", jsonBuilder().startObject().startObject("properties") .startObject("field1").field("type", "keyword").endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "field1") + .endObject() .startObject("nested1").field("type", "nested").startObject("properties") .startObject("field2").field("type", "keyword").endObject() .endObject().endObject() @@ -649,4 +653,28 @@ public void testSameParentDocHavingMultipleBuckets() throws Exception { assertThat(barCount.getValue(), equalTo(2L)); } } + + public void testFieldAlias() { + SearchResponse response = client().prepareSearch("idx1") + .addAggregation(nested("nested1", "nested1") + .subAggregation( + terms("field2").field("nested1.field2") + .subAggregation( + reverseNested("nested1_to_field1") + .subAggregation( + terms("field1").field("alias") + .collectMode(randomFrom(SubAggCollectionMode.values())))))).get(); + + assertSearchResponse(response); + + Nested nested = response.getAggregations().get("nested1"); + Terms nestedTerms = nested.getAggregations().get("field2"); + Terms.Bucket bucket = nestedTerms.getBuckets().iterator().next(); + + ReverseNested reverseNested = bucket.getAggregations().get("nested1_to_field1"); + Terms reverseNestedTerms = reverseNested.getAggregations().get("field1"); + + assertThat(((InternalAggregation)reverseNested).getProperty("field1"), sameInstance(reverseNestedTerms)); + assertThat(reverseNestedTerms.getBuckets().size(), equalTo(6)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 11a44a1d89bad..c893e59596484 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -71,8 +71,14 @@ import java.util.Arrays; 
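The tests above spell the alias mapping two equivalent ways; a condensed sketch of both forms, reusing the RangeIT index and field names (illustrative only, not part of this change):

    // Shorthand string-pair form:
    prepareCreate("old_index")
        .addMapping("_doc",
            "distance", "type=double",
            "route_length_miles", "type=alias,path=distance")
        .get();

    // Equivalent XContentBuilder form:
    XContentBuilder mapping = jsonBuilder()
        .startObject()
            .startObject("_doc")
                .startObject("properties")
                    .startObject("distance")
                        .field("type", "double")
                    .endObject()
                    .startObject("route_length_miles")
                        .field("type", "alias")
                        .field("path", "distance")
                    .endObject()
                .endObject()
            .endObject()
        .endObject();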
import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; import java.util.stream.DoubleStream; +import static org.elasticsearch.search.aggregations.AggregationBuilders.max; +import static org.elasticsearch.search.aggregations.AggregationBuilders.nested; + public class NestedAggregatorTests extends AggregatorTestCase { private static final String VALUE_FIELD_NAME = "number"; @@ -84,6 +90,15 @@ public class NestedAggregatorTests extends AggregatorTestCase { private final SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); + /** + * For each provided field type, we also register an alias with name {@code -alias}. + */ + @Override + protected Map getFieldAliases(MappedFieldType... fieldTypes) { + return Arrays.stream(fieldTypes).collect(Collectors.toMap( + ft -> ft.name() + "-alias", + Function.identity())); + } public void testNoDocs() throws IOException { try (Directory directory = newDirectory()) { @@ -638,6 +653,49 @@ public void testPreGetChildLeafCollectors() throws IOException { } } + public void testFieldAlias() throws IOException { + int numRootDocs = randomIntBetween(1, 20); + + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.LONG); + fieldType.setName(VALUE_FIELD_NAME); + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + for (int i = 0; i < numRootDocs; i++) { + List documents = new ArrayList<>(); + int numNestedDocs = randomIntBetween(0, 20); + generateDocuments(documents, numNestedDocs, i, NESTED_OBJECT, VALUE_FIELD_NAME); + + Document document = new Document(); + document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), IdFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(TypeFieldMapper.NAME, "test", + TypeFieldMapper.Defaults.FIELD_TYPE)); + document.add(sequenceIDFields.primaryTerm); + documents.add(document); + iw.addDocuments(documents); + } + iw.commit(); + } + + try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) { + NestedAggregationBuilder agg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( + max(MAX_AGG_NAME).field(VALUE_FIELD_NAME)); + + NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( + max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias")); + + Nested nested = search(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), agg, fieldType); + Nested aliasNested = search(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), aliasAgg, fieldType); + + assertTrue(nested.getDocCount() > 0); + assertEquals(nested, aliasNested); + } + } + } + private double generateMaxDocs(List documents, int numNestedDocs, int id, String path, String fieldName) { return DoubleStream.of(generateDocuments(documents, numNestedDocs, id, path, fieldName)) .max().orElse(Double.NEGATIVE_INFINITY); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 36d6c6bd6e45b..99322af2264ac 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -40,7 +40,15 @@ import java.io.IOException; 
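The aggregator unit tests in this change all follow one pattern: the getFieldAliases hook registers "<field>-alias" for every MappedFieldType, the same aggregation is run once against the concrete field and once against the alias, and the reduced results are asserted equal. A minimal sketch of that pattern with a bare max aggregation (a sketch only; helper methods come from AggregatorTestCase, and the index writer/reader setup is elided):

    MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
    fieldType.setName("number");

    // getFieldAliases(fieldType) maps "number-alias" to the same field type, so both
    // builders resolve to identical doc values at execution time.
    MaxAggregationBuilder agg = max("maxAgg").field("number");
    MaxAggregationBuilder aliasAgg = max("maxAgg").field("number-alias");

    InternalMax result = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), agg, fieldType);
    InternalMax aliasResult = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, fieldType);
    assertEquals(result, aliasResult);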
import java.util.ArrayList; +import java.util.Arrays; import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.elasticsearch.search.aggregations.AggregationBuilders.max; +import static org.elasticsearch.search.aggregations.AggregationBuilders.nested; +import static org.elasticsearch.search.aggregations.AggregationBuilders.reverseNested; public class ReverseNestedAggregatorTests extends AggregatorTestCase { @@ -50,6 +58,15 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase { private static final String REVERSE_AGG_NAME = "reverseNestedAgg"; private static final String MAX_AGG_NAME = "maxAgg"; + /** + * For each provided field type, we also register an alias with name {@code -alias}. + */ + @Override + protected Map getFieldAliases(MappedFieldType... fieldTypes) { + return Arrays.stream(fieldTypes).collect(Collectors.toMap( + ft -> ft.name() + "-alias", + Function.identity())); + } public void testNoDocs() throws IOException { try (Directory directory = newDirectory()) { @@ -150,4 +167,63 @@ public void testMaxFromParentDocs() throws IOException { } } + public void testFieldAlias() throws IOException { + int numParentDocs = randomIntBetween(1, 20); + + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.LONG); + fieldType.setName(VALUE_FIELD_NAME); + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + for (int i = 0; i < numParentDocs; i++) { + List documents = new ArrayList<>(); + int numNestedDocs = randomIntBetween(0, 20); + for (int nested = 0; nested < numNestedDocs; nested++) { + Document document = new Document(); + document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), + IdFieldMapper.Defaults.NESTED_FIELD_TYPE)); + document.add(new Field(TypeFieldMapper.NAME, "__" + NESTED_OBJECT, + TypeFieldMapper.Defaults.FIELD_TYPE)); + documents.add(document); + } + Document document = new Document(); + document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), + IdFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(TypeFieldMapper.NAME, "test", + TypeFieldMapper.Defaults.FIELD_TYPE)); + + long value = randomNonNegativeLong() % 10000; + document.add(new SortedNumericDocValuesField(VALUE_FIELD_NAME, value)); + document.add(SeqNoFieldMapper.SequenceIDFields.emptySeqID().primaryTerm); + documents.add(document); + iw.addDocuments(documents); + } + iw.commit(); + } + + try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) { + + MaxAggregationBuilder maxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME); + MaxAggregationBuilder aliasMaxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias"); + + NestedAggregationBuilder agg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( + reverseNested(REVERSE_AGG_NAME).subAggregation(maxAgg)); + NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( + reverseNested(REVERSE_AGG_NAME).subAggregation(aliasMaxAgg)); + + Nested nested = search(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), agg, fieldType); + Nested aliasNested = search(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), aliasAgg, fieldType); + + ReverseNested reverseNested = nested.getAggregations().get(REVERSE_AGG_NAME); + ReverseNested aliasReverseNested = aliasNested.getAggregations().get(REVERSE_AGG_NAME); + + assertTrue(reverseNested.getDocCount() 
> 0); + assertEquals(reverseNested, aliasReverseNested); + } + } + } + } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java index 8515f0a899458..0485d4f58550f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java @@ -55,7 +55,13 @@ import org.junit.Before; import java.io.IOException; +import java.util.Arrays; import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; public class SignificantTermsAggregatorTests extends AggregatorTestCase { @@ -70,6 +76,16 @@ public void setUpTest() throws Exception { fieldType.setName("field"); } + /** + * For each provided field type, we also register an alias with name {@code -alias}. + */ + @Override + protected Map getFieldAliases(MappedFieldType... fieldTypes) { + return Arrays.stream(fieldTypes).collect(Collectors.toMap( + ft -> ft.name() + "-alias", + Function.identity())); + } + public void testParsedAsFilter() throws IOException { IndexReader indexReader = new MultiReader(); IndexSearcher indexSearcher = newSearcher(indexReader); @@ -104,7 +120,7 @@ public void testSignificance() throws IOException { IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); indexWriterConfig.setMaxBufferedDocs(100); indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment - + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { addMixedTextDocs(textFieldType, w); @@ -137,7 +153,7 @@ public void testSignificance() throws IOException { assertNull(terms.getBucketByKey("odd")); assertNull(terms.getBucketByKey("common")); assertNotNull(terms.getBucketByKey("even")); - + // Search odd with regex includeexcludes sigAgg.includeExclude(new IncludeExclude("o.d", null)); terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textFieldType); @@ -149,7 +165,7 @@ public void testSignificance() throws IOException { // Search with string-based includeexcludes String oddStrings[] = new String[] {"odd", "weird"}; String evenStrings[] = new String[] {"even", "regular"}; - + sigAgg.includeExclude(new IncludeExclude(oddStrings, evenStrings)); sigAgg.significanceHeuristic(SignificanceHeuristicTests.getRandomSignificanceheuristic()); terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textFieldType); @@ -159,7 +175,7 @@ public void testSignificance() throws IOException { assertNull(terms.getBucketByKey("common")); assertNull(terms.getBucketByKey("even")); assertNull(terms.getBucketByKey("regular")); - + sigAgg.includeExclude(new IncludeExclude(evenStrings, oddStrings)); terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textFieldType); assertEquals(0, terms.getBuckets().size()); @@ -168,7 +184,7 @@ public void testSignificance() throws IOException { assertNull(terms.getBucketByKey("common")); assertNull(terms.getBucketByKey("even")); assertNull(terms.getBucketByKey("regular")); - + } } } @@ -232,7 +248,7 @@ public void testNumericSignificance() throws IOException { } } } - + /** * Uses the 
significant terms aggregation on an index with unmapped field */ @@ -266,7 +282,57 @@ public void testUnmapped() throws IOException { } } - } + } + + public void testFieldAlias() throws IOException { + TextFieldType textFieldType = new TextFieldType(); + textFieldType.setName("text"); + textFieldType.setFielddata(true); + textFieldType.setIndexAnalyzer(new NamedAnalyzer("my_analyzer", AnalyzerScope.GLOBAL, new StandardAnalyzer())); + + IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); + indexWriterConfig.setMaxBufferedDocs(100); + indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment + + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { + addMixedTextDocs(textFieldType, w); + + SignificantTermsAggregationBuilder agg = significantTerms("sig_text").field("text"); + SignificantTermsAggregationBuilder aliasAgg = significantTerms("sig_text").field("text-alias"); + + String executionHint = randomExecutionHint(); + agg.executionHint(executionHint); + aliasAgg.executionHint(executionHint); + + if (randomBoolean()) { + // Use a background filter which just happens to be same scope as whole-index. + QueryBuilder backgroundFilter = QueryBuilders.termsQuery("text", "common"); + agg.backgroundFilter(backgroundFilter); + aliasAgg.backgroundFilter(backgroundFilter); + } + + try (IndexReader reader = DirectoryReader.open(w)) { + assertEquals("test expects a single segment", 1, reader.leaves().size()); + IndexSearcher searcher = new IndexSearcher(reader); + + SignificantTerms evenTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), + agg, textFieldType); + SignificantTerms aliasEvenTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), + aliasAgg, textFieldType); + + assertFalse(evenTerms.getBuckets().isEmpty()); + assertEquals(evenTerms, aliasEvenTerms); + + SignificantTerms oddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), + agg, textFieldType); + SignificantTerms aliasOddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), + aliasAgg, textFieldType); + + assertFalse(oddTerms.getBuckets().isEmpty()); + assertEquals(oddTerms, aliasOddTerms); + } + } + } private void addMixedTextDocs(TextFieldType textFieldType, IndexWriter w) throws IOException { for (int i = 0; i < 10; i++) { @@ -284,7 +350,7 @@ private void addMixedTextDocs(TextFieldType textFieldType, IndexWriter w) throws w.addDocument(doc); } - } + } private void addFields(Document doc, List createFields) { for (Field field : createFields) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java index ec15e2fc3e853..dbff6daed6285 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java @@ -34,19 +34,34 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.search.aggregations.AggregatorTestCase; import 
org.elasticsearch.search.aggregations.bucket.sampler.Sampler; import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTextAggregationBuilder; import java.io.IOException; import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.elasticsearch.search.aggregations.AggregationBuilders.sampler; +import static org.elasticsearch.search.aggregations.AggregationBuilders.significantText; public class SignificantTextAggregatorTests extends AggregatorTestCase { - - + + /** + * For each provided field type, we also register an alias with name {@code -alias}. + */ + @Override + protected Map getFieldAliases(MappedFieldType... fieldTypes) { + return Arrays.stream(fieldTypes).collect(Collectors.toMap( + ft -> ft.name() + "-alias", + Function.identity())); + } + /** * Uses the significant text aggregation to find the keywords in text fields */ @@ -59,22 +74,7 @@ public void testSignificance() throws IOException { indexWriterConfig.setMaxBufferedDocs(100); indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { - for (int i = 0; i < 10; i++) { - Document doc = new Document(); - StringBuilder text = new StringBuilder("common "); - if (i % 2 == 0) { - text.append("odd "); - } else { - text.append("even separator" + i + " duplicate duplicate duplicate duplicate duplicate duplicate "); - } - - doc.add(new Field("text", text.toString(), textFieldType)); - String json ="{ \"text\" : \"" + text.toString() + "\","+ - " \"json_only_field\" : \"" + text.toString() + "\"" + - " }"; - doc.add(new StoredField("_source", new BytesRef(json))); - w.addDocument(doc); - } + indexDocuments(w, textFieldType); SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "text").filterDuplicateText(true); if(randomBoolean()){ @@ -82,37 +82,104 @@ public void testSignificance() throws IOException { } SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler") .subAggregation(sigAgg); - + try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); - + // Search "odd" which should have no duplication Sampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aggBuilder, textFieldType); SignificantTerms terms = sampler.getAggregations().get("sig_text"); - + assertNull(terms.getBucketByKey("even")); - assertNull(terms.getBucketByKey("duplicate")); - assertNull(terms.getBucketByKey("common")); + assertNull(terms.getBucketByKey("duplicate")); + assertNull(terms.getBucketByKey("common")); assertNotNull(terms.getBucketByKey("odd")); // Search "even" which will have duplication sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aggBuilder, textFieldType); terms = sampler.getAggregations().get("sig_text"); - + assertNull(terms.getBucketByKey("odd")); - assertNull(terms.getBucketByKey("duplicate")); - assertNull(terms.getBucketByKey("common")); + assertNull(terms.getBucketByKey("duplicate")); + assertNull(terms.getBucketByKey("common")); assertNull(terms.getBucketByKey("separator2")); 
assertNull(terms.getBucketByKey("separator4")); assertNull(terms.getBucketByKey("separator6")); assertNotNull(terms.getBucketByKey("even")); - + } } } - + + public void testFieldAlias() throws IOException { + TextFieldType textFieldType = new TextFieldType(); + textFieldType.setName("text"); + textFieldType.setIndexAnalyzer(new NamedAnalyzer("my_analyzer", AnalyzerScope.GLOBAL, new StandardAnalyzer())); + + IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); + indexWriterConfig.setMaxBufferedDocs(100); + indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { + indexDocuments(w, textFieldType); + + SignificantTextAggregationBuilder agg = significantText("sig_text", "text") + .filterDuplicateText(true); + SignificantTextAggregationBuilder aliasAgg = significantText("sig_text", "text-alias") + .filterDuplicateText(true); + + if (randomBoolean()) { + List sourceFieldNames = Arrays.asList(new String [] {"json_only_field"}); + agg.sourceFieldNames(sourceFieldNames); + aliasAgg.sourceFieldNames(sourceFieldNames); + } + + try (IndexReader reader = DirectoryReader.open(w)) { + assertEquals("test expects a single segment", 1, reader.leaves().size()); + IndexSearcher searcher = new IndexSearcher(reader); + + SamplerAggregationBuilder samplerAgg = sampler("sampler").subAggregation(agg); + SamplerAggregationBuilder aliasSamplerAgg = sampler("sampler").subAggregation(aliasAgg); + + Sampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), samplerAgg, textFieldType); + Sampler aliasSampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aliasSamplerAgg, textFieldType); + + SignificantTerms terms = sampler.getAggregations().get("sig_text"); + SignificantTerms aliasTerms = aliasSampler.getAggregations().get("sig_text"); + assertFalse(terms.getBuckets().isEmpty()); + assertEquals(terms, aliasTerms); + + sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), samplerAgg, textFieldType); + aliasSampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aliasSamplerAgg, textFieldType); + + terms = sampler.getAggregations().get("sig_text"); + aliasTerms = aliasSampler.getAggregations().get("sig_text"); + assertFalse(terms.getBuckets().isEmpty()); + assertEquals(terms, aliasTerms); + } + } + } + + private void indexDocuments(IndexWriter writer, TextFieldType textFieldType) throws IOException { + for (int i = 0; i < 10; i++) { + Document doc = new Document(); + StringBuilder text = new StringBuilder("common "); + if (i % 2 == 0) { + text.append("odd "); + } else { + text.append("even separator" + i + " duplicate duplicate duplicate duplicate duplicate duplicate "); + } + + doc.add(new Field("text", text.toString(), textFieldType)); + String json ="{ \"text\" : \"" + text.toString() + "\","+ + " \"json_only_field\" : \"" + text.toString() + "\"" + + " }"; + doc.add(new StoredField("_source", new BytesRef(json))); + writer.addDocument(doc); + } + } + /** * Test documents with arrays of text */ @@ -137,13 +204,13 @@ public void testSignificanceOnTextArrays() throws IOException { sigAgg.sourceFieldNames(Arrays.asList(new String [] {"title", "text"})); try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = new IndexSearcher(reader); + IndexSearcher searcher = new IndexSearcher(reader); 
searchAndReduce(searcher, new TermQuery(new Term("text", "foo")), sigAgg, textFieldType); // No significant results to be found in this test - only checking we don't end up // with the internal exception discovered in issue https://github.com/elastic/elasticsearch/issues/25029 } } } - - + + } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 4ff3c5fc5b484..d74e43327fd03 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -809,12 +809,12 @@ public void testUnmapped() throws Exception { fieldType1.setHasDocValues(true); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); - fieldType1.setName("another_long"); - fieldType1.setHasDocValues(true); + fieldType2.setName("another_long"); + fieldType2.setHasDocValues(true); MappedFieldType fieldType3 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); - fieldType1.setName("another_double"); - fieldType1.setHasDocValues(true); + fieldType3.setName("another_double"); + fieldType3.setHasDocValues(true); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); ValueType[] valueTypes = new ValueType[]{ValueType.STRING, ValueType.LONG, ValueType.DOUBLE}; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 1b4ae466bb3bc..b3a5df4dbfc07 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -18,12 +18,7 @@ */ package org.elasticsearch.search.aggregations.metrics; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - +import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; @@ -36,6 +31,14 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.hamcrest.core.IsNull; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; @@ -61,6 +64,33 @@ protected Collection> nodePlugins() { return Collections.singleton(MetricAggScriptPlugin.class); } + @Override + public void setupSuiteScopeCluster() throws Exception { + super.setupSuiteScopeCluster(); + + // Create two indices and add the field 'route_length_miles' as an alias in + // one, and a concrete field in the other. 
+ prepareCreate("old_index") + .addMapping("_doc", + "transit_mode", "type=keyword", + "distance", "type=double", + "route_length_miles", "type=alias,path=distance") + .get(); + prepareCreate("new_index") + .addMapping("_doc", + "transit_mode", "type=keyword", + "route_length_miles", "type=double") + .get(); + + List builders = new ArrayList<>(); + builders.add(client().prepareIndex("old_index", "_doc").setSource("transit_mode", "train", "distance", 42.0)); + builders.add(client().prepareIndex("old_index", "_doc").setSource("transit_mode", "bus", "distance", 50.5)); + builders.add(client().prepareIndex("new_index", "_doc").setSource("transit_mode", "train", "route_length_miles", 100.2)); + + indexRandom(true, builders); + ensureSearchable(); + } + @Override public void testEmptyAggregation() throws Exception { @@ -382,4 +412,54 @@ public void testDontCacheScripts() throws Exception { assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getMissCount(), equalTo(1L)); } + + public void testFieldAlias() { + SearchResponse response = client().prepareSearch("old_index", "new_index") + .addAggregation(sum("sum") + .field("route_length_miles")) + .execute().actionGet(); + + assertSearchResponse(response); + + Sum sum = response.getAggregations().get("sum"); + assertThat(sum, IsNull.notNullValue()); + assertThat(sum.getName(), equalTo("sum")); + assertThat(sum.getValue(), equalTo(192.7)); + } + + public void testFieldAliasInSubAggregation() { + SearchResponse response = client().prepareSearch("old_index", "new_index") + .addAggregation(terms("terms") + .field("transit_mode") + .subAggregation(sum("sum") + .field("route_length_miles"))) + .execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + + List buckets = terms.getBuckets(); + assertThat(buckets.size(), equalTo(2)); + + Terms.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("train")); + assertThat(bucket.getDocCount(), equalTo(2L)); + + Sum sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + assertThat(sum.getValue(), equalTo(142.2)); + + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("bus")); + assertThat(bucket.getDocCount(), equalTo(1L)); + + sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + assertThat(sum.getValue(), equalTo(50.5)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java index b2a949ceeee1a..5124503fc0369 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java @@ -26,10 +26,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.indices.breaker.CircuitBreakerService; import 
org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngine; @@ -344,8 +342,7 @@ public void testSelfReferencingAggStateAfterCombine() throws IOException { * is final and cannot be mocked */ @Override - protected QueryShardContext queryShardContextMock(MapperService mapperService, final MappedFieldType[] fieldTypes, - CircuitBreakerService circuitBreakerService) { + protected QueryShardContext queryShardContextMock(MapperService mapperService) { MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, SCRIPTS); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java index ae65bc9f32c9a..b213ca785e234 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java @@ -258,4 +258,27 @@ public void testUnmappedBoolean() throws Exception { assertEquals(1, values.nextValue()); } } + + + public void testFieldAlias() throws Exception { + IndexService indexService = createIndex("index", Settings.EMPTY, "type", + "field", "type=keyword", "alias", "type=alias,path=field"); + client().prepareIndex("index", "type", "1") + .setSource("field", "value") + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + + try (Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { + QueryShardContext context = indexService.newQueryShardContext(0, searcher.reader(), () -> 42L, null); + ValuesSourceConfig config = ValuesSourceConfig.resolve( + context, ValueType.STRING, "alias", null, null, null, null); + ValuesSource.Bytes valuesSource = config.toValuesSource(context); + + LeafReaderContext ctx = searcher.reader().leaves().get(0); + SortedBinaryDocValues values = valuesSource.bytesValues(ctx); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(new BytesRef("value"), values.nextValue()); + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index e5af22cd2ae65..069c72c10b496 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -171,6 +171,106 @@ public void testHighlightingWithWildcardName() throws IOException { } } + public void testFieldAlias() throws IOException { + XContentBuilder mappings = jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("store", true) + .field("term_vector", "with_positions_offsets") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "text") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mappings)); + + client().prepareIndex("test", "type", "1").setSource("text", "foo").get(); + refresh(); + + for (String type : ALL_TYPES) { + HighlightBuilder builder = new 
HighlightBuilder() + .field(new Field("alias").highlighterType(type)) + .requireFieldMatch(randomBoolean()); + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("alias", "foo")) + .highlighter(builder) + .get(); + assertHighlight(search, 0, "alias", 0, equalTo("foo")); + } + } + + public void testFieldAliasWithSourceLookup() throws IOException { + XContentBuilder mappings = jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("analyzer", "whitespace") + .field("store", false) + .field("term_vector", "with_positions_offsets") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "text") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mappings)); + + client().prepareIndex("test", "type", "1").setSource("text", "foo bar").get(); + refresh(); + + for (String type : ALL_TYPES) { + HighlightBuilder builder = new HighlightBuilder() + .field(new Field("alias").highlighterType(type)) + .requireFieldMatch(randomBoolean()); + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("alias", "bar")) + .highlighter(builder) + .get(); + assertHighlight(search, 0, "alias", 0, equalTo("foo bar")); + } + } + + public void testFieldAliasWithWildcardField() throws IOException { + XContentBuilder mappings = jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("keyword") + .field("type", "keyword") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "keyword") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mappings)); + + client().prepareIndex("test", "type", "1").setSource("keyword", "foo").get(); + refresh(); + + HighlightBuilder builder = new HighlightBuilder() + .field(new Field("al*")) + .requireFieldMatch(false); + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("alias", "foo")) + .highlighter(builder) + .get(); + assertHighlight(search, 0, "alias", 0, equalTo("foo")); + } + + public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOException { XContentBuilder mappings = jsonBuilder(); mappings.startObject(); diff --git a/server/src/test/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/test/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java new file mode 100644 index 0000000000000..8440357758ea8 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -0,0 +1,151 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.fieldcaps; + +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Before; + +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Predicate; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + +public class FieldCapabilitiesIT extends ESIntegTestCase { + + @Before + public void setUp() throws Exception { + super.setUp(); + + XContentBuilder oldIndexMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("distance") + .field("type", "double") + .endObject() + .startObject("route_length_miles") + .field("type", "alias") + .field("path", "distance") + .endObject() + .startObject("playlist") + .field("type", "text") + .endObject() + .startObject("secret_soundtrack") + .field("type", "alias") + .field("path", "playlist") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("old_index").addMapping("_doc", oldIndexMapping)); + + XContentBuilder newIndexMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("distance") + .field("type", "text") + .endObject() + .startObject("route_length_miles") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("new_index").addMapping("_doc", newIndexMapping)); + } + + public static class FieldFilterPlugin extends Plugin implements MapperPlugin { + @Override + public Function> getFieldFilter() { + return index -> field -> !field.equals("playlist"); + } + } + + @Override + protected Collection> nodePlugins() { + return Collections.singleton(FieldFilterPlugin.class); + } + + public void testFieldAlias() { + FieldCapabilitiesResponse response = client().prepareFieldCaps().setFields("distance", "route_length_miles") + .execute().actionGet(); + + // Ensure the response has entries for both requested fields. + assertTrue(response.get().containsKey("distance")); + assertTrue(response.get().containsKey("route_length_miles")); + + // Check the capabilities for the 'distance' field. + Map distance = response.getField("distance"); + assertEquals(2, distance.size()); + + assertTrue(distance.containsKey("double")); + assertEquals( + new FieldCapabilities("distance", "double", true, true, new String[] {"old_index"}, null, null), + distance.get("double")); + + assertTrue(distance.containsKey("text")); + assertEquals( + new FieldCapabilities("distance", "text", true, false, new String[] {"new_index"}, null, null), + distance.get("text")); + + // Check the capabilities for the 'route_length_miles' alias. 
+ Map routeLength = response.getField("route_length_miles"); + assertEquals(1, routeLength.size()); + + assertTrue(routeLength.containsKey("double")); + assertEquals( + new FieldCapabilities("route_length_miles", "double", true, true), + routeLength.get("double")); + } + + public void testFieldAliasWithWildcard() { + FieldCapabilitiesResponse response = client().prepareFieldCaps().setFields("route*") + .execute().actionGet(); + + assertEquals(1, response.get().size()); + assertTrue(response.get().containsKey("route_length_miles")); + } + + public void testFieldAliasFiltering() { + FieldCapabilitiesResponse response = client().prepareFieldCaps().setFields( + "secret-soundtrack", "route_length_miles") + .execute().actionGet(); + assertEquals(1, response.get().size()); + assertTrue(response.get().containsKey("route_length_miles")); + } + + public void testFieldAliasFilteringWithWildcard() { + FieldCapabilitiesResponse response = client().prepareFieldCaps() + .setFields("distance", "secret*") + .execute().actionGet(); + assertEquals(1, response.get().size()); + assertTrue(response.get().containsKey("distance")); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 2e9426bbe6879..2126e0e94eb92 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -46,7 +47,12 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.ReadableDateTime; import org.joda.time.base.BaseDateTime; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; import java.time.ZoneOffset; import java.time.ZonedDateTime; @@ -913,6 +919,163 @@ public void testScriptFields() throws Exception { } } + public void testDocValueFieldsWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("text_field") + .field("type", "text") + .field("fielddata", true) + .endObject() + .startObject("date_field") + .field("type", "date") + .field("format", "yyyy-MM-dd") + .endObject() + .startObject("text_field_alias") + .field("type", "alias") + .field("path", "text_field") + .endObject() + .startObject("date_field_alias") + .field("type", "alias") + .field("path", "date_field") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mapping)); + ensureGreen("test"); + + DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC); + DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd"); + + index("test", "type", "1", "text_field", "foo", "date_field", formatter.print(date)); + refresh("test"); + + SearchRequestBuilder 
builder = client().prepareSearch().setQuery(matchAllQuery()) + .addDocValueField("text_field_alias") + .addDocValueField("date_field_alias", "use_field_mapping") + .addDocValueField("date_field"); + SearchResponse searchResponse = builder.execute().actionGet(); + + assertNoFailures(searchResponse); + assertHitCount(searchResponse, 1); + SearchHit hit = searchResponse.getHits().getAt(0); + + Map fields = hit.getFields(); + assertThat(fields.keySet(), equalTo(newHashSet("text_field_alias", "date_field_alias", "date_field"))); + + DocumentField textFieldAlias = fields.get("text_field_alias"); + assertThat(textFieldAlias.getName(), equalTo("text_field_alias")); + assertThat(textFieldAlias.getValue(), equalTo("foo")); + + DocumentField dateFieldAlias = fields.get("date_field_alias"); + assertThat(dateFieldAlias.getName(), equalTo("date_field_alias")); + assertThat(dateFieldAlias.getValue(), + equalTo("1990-12-29")); + + DocumentField dateField = fields.get("date_field"); + assertThat(dateField.getName(), equalTo("date_field")); + + ReadableDateTime fetchedDate = dateField.getValue(); + assertThat(fetchedDate, equalTo(date)); + } + + + public void testStoredFieldsWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field1") + .field("type", "text") + .field("store", true) + .endObject() + .startObject("field2") + .field("type", "text") + .field("store", false) + .endObject() + .startObject("field1-alias") + .field("type", "alias") + .field("path", "field1") + .endObject() + .startObject("field2-alias") + .field("type", "alias") + .field("path", "field2") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mapping)); + + index("test", "type", "1", "field1", "value1", "field2", "value2"); + refresh("test"); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addStoredField("field1-alias") + .addStoredField("field2-alias") + .get(); + assertHitCount(searchResponse, 1L); + + SearchHit hit = searchResponse.getHits().getAt(0); + assertEquals(1, hit.getFields().size()); + assertTrue(hit.getFields().containsKey("field1-alias")); + + DocumentField field = hit.getFields().get("field1-alias"); + assertThat(field.getValue().toString(), equalTo("value1")); + } + + public void testWildcardStoredFieldsWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field1") + .field("type", "text") + .field("store", true) + .endObject() + .startObject("field2") + .field("type", "text") + .field("store", false) + .endObject() + .startObject("field1-alias") + .field("type", "alias") + .field("path", "field1") + .endObject() + .startObject("field2-alias") + .field("type", "alias") + .field("path", "field2") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mapping)); + + index("test", "type", "1", "field1", "value1", "field2", "value2"); + refresh("test"); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addStoredField("field*") + .get(); + assertHitCount(searchResponse, 1L); + + SearchHit hit = searchResponse.getHits().getAt(0); + assertEquals(2, hit.getFields().size()); + assertTrue(hit.getFields().containsKey("field1")); + 
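// field2 is mapped with store=false, so the "field*" expansion returns only the
// stored field1 and its alias.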
assertTrue(hit.getFields().containsKey("field1-alias")); + + DocumentField field = hit.getFields().get("field1"); + assertThat(field.getValue().toString(), equalTo("value1")); + + DocumentField fieldAlias = hit.getFields().get("field1-alias"); + assertThat(fieldAlias.getValue().toString(), equalTo("value1")); + } + public void testLoadMetadata() throws Exception { assertAcked(prepareCreate("test")); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java index 7906165b090af..2ff7d0c1383d3 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java @@ -24,8 +24,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; @@ -58,10 +56,10 @@ protected void setupSuiteScopeCluster() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location").field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); + + assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", "location", + "type=geo_point", "alias", + "type=alias,path=location")); ensureGreen(); indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() @@ -132,4 +130,17 @@ public void testSimpleUnclosedPolygon() throws Exception { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); } } + + public void testFieldAlias() { + List points = new ArrayList<>(); + points.add(new GeoPoint(40.7, -74.0)); + points.add(new GeoPoint(40.7, -74.1)); + points.add(new GeoPoint(40.8, -74.1)); + points.add(new GeoPoint(40.8, -74.0)); + points.add(new GeoPoint(40.7, -74.0)); + SearchResponse searchResponse = client().prepareSearch("test") // from NY + .setQuery(boolQuery().must(geoPolygonQuery("alias", points))) + .execute().actionGet(); + assertHitCount(searchResponse, 4); + } } diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index d3a31f12c57db..6f204796e4118 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -19,7 +19,10 @@ package org.elasticsearch.search.geo; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.CoordinatesBuilder; import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; @@ -27,20 +30,16 @@ 
import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; -import org.locationtech.spatial4j.shape.Rectangle; -import org.locationtech.jts.geom.Coordinate; - -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.spatial4j.shape.Rectangle; import java.io.IOException; import java.util.Locale; @@ -503,4 +502,33 @@ public void testPointsOnlyExplicit() throws Exception { assertEquals(2, response.getHits().getTotalHits()); } + + public void testFieldAlias() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("location") + .field("type", "geo_shape") + .field("tree", randomBoolean() ? "quadtree" : "geohash") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "location") + .endObject() + .endObject() + .endObject() + .endObject(); + + createIndex("test", Settings.EMPTY, "type", mapping); + + ShapeBuilder shape = RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.MULTIPOINT); + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("location", shape).endObject()) + .setRefreshPolicy(IMMEDIATE).get(); + + SearchResponse response = client().prepareSearch("test") + .setQuery(geoShapeQuery("alias", shape)) + .execute().actionGet(); + assertEquals(1, response.getHits().getTotalHits()); + } } diff --git a/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java new file mode 100644 index 0000000000000..dfdbef1c3d539 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+package org.elasticsearch.search.lookup;
+
+import org.elasticsearch.index.fielddata.AtomicFieldData;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.ScriptDocValues;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import static org.mockito.AdditionalAnswers.returnsFirstArg;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class LeafDocLookupTests extends ESTestCase {
+    private ScriptDocValues<?> docValues;
+    private LeafDocLookup docLookup;
+
+    @Before
+    public void setUp() throws Exception {
+        super.setUp();
+
+        MappedFieldType fieldType = mock(MappedFieldType.class);
+        when(fieldType.name()).thenReturn("field");
+        when(fieldType.valueForDisplay(anyObject())).then(returnsFirstArg());
+
+        MapperService mapperService = mock(MapperService.class);
+        when(mapperService.fullName("field")).thenReturn(fieldType);
+        when(mapperService.fullName("alias")).thenReturn(fieldType);
+
+        docValues = mock(ScriptDocValues.class);
+
+        AtomicFieldData atomicFieldData = mock(AtomicFieldData.class);
+        doReturn(docValues).when(atomicFieldData).getScriptValues();
+
+        IndexFieldData<?> fieldData = mock(IndexFieldData.class);
+        when(fieldData.getFieldName()).thenReturn("field");
+        doReturn(atomicFieldData).when(fieldData).load(anyObject());
+
+        docLookup = new LeafDocLookup(mapperService,
+            ignored -> fieldData,
+            new String[] { "type" },
+            null);
+    }
+
+    public void testBasicLookup() {
+        ScriptDocValues<?> fetchedDocValues = docLookup.get("field");
+        assertEquals(docValues, fetchedDocValues);
+    }
+
+    public void testLookupWithFieldAlias() {
+        ScriptDocValues<?> fetchedDocValues = docLookup.get("alias");
+        assertEquals(docValues, fetchedDocValues);
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java
new file mode 100644
index 0000000000000..1195893a28af0
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.lookup;
+
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.StoredFieldVisitor;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.Collections;
+import java.util.List;
+
+import static org.mockito.AdditionalAnswers.returnsFirstArg;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyInt;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class LeafFieldsLookupTests extends ESTestCase {
+    private LeafFieldsLookup fieldsLookup;
+
+    @Before
+    public void setUp() throws Exception {
+        super.setUp();
+
+        MappedFieldType fieldType = mock(MappedFieldType.class);
+        when(fieldType.name()).thenReturn("field");
+        when(fieldType.valueForDisplay(anyObject())).then(returnsFirstArg());
+
+        MapperService mapperService = mock(MapperService.class);
+        when(mapperService.fullName("field")).thenReturn(fieldType);
+        when(mapperService.fullName("alias")).thenReturn(fieldType);
+
+        FieldInfo mockFieldInfo = new FieldInfo("field", 1, false, false, true,
+            IndexOptions.NONE, DocValuesType.NONE, -1, Collections.emptyMap(), 0, 0, false);
+
+        LeafReader leafReader = mock(LeafReader.class);
+        doAnswer(invocation -> {
+            Object[] args = invocation.getArguments();
+            StoredFieldVisitor visitor = (StoredFieldVisitor) args[1];
+            visitor.doubleField(mockFieldInfo, 2.718);
+            return null;
+        }).when(leafReader).document(anyInt(), any(StoredFieldVisitor.class));
+
+        fieldsLookup = new LeafFieldsLookup(mapperService,
+            new String[] { "type" },
+            leafReader);
+    }
+
+    public void testBasicLookup() {
+        FieldLookup fieldLookup = (FieldLookup) fieldsLookup.get("field");
+        assertEquals("field", fieldLookup.fieldType().name());
+
+        List<Object> values = fieldLookup.getValues();
+        assertNotNull(values);
+        assertEquals(1, values.size());
+        assertEquals(2.718, values.get(0));
+    }
+
+    public void testLookupWithFieldAlias() {
+        FieldLookup fieldLookup = (FieldLookup) fieldsLookup.get("alias");
+        assertEquals("field", fieldLookup.fieldType().name());
+
+        List<Object> values = fieldLookup.getValues();
+        assertNotNull(values);
+        assertEquals(1, values.size());
+        assertEquals(2.718, values.get(0));
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java
index 81f48aac074e6..dedd0f036641b 100644
--- a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java
+++ b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java
@@ -32,6 +32,7 @@
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.MoreLikeThisQueryBuilder;
 import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
+import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -337,6 +338,36 @@ public void testNumericField() throws Exception {
         assertHitCount(searchResponse, 0L);
     }
+
+    public void testMoreLikeThisWithFieldAlias() throws Exception {
+        XContentBuilder
mapping = XContentFactory.jsonBuilder().startObject()
+            .startObject("_doc")
+                .startObject("properties")
+                    .startObject("text")
+                        .field("type", "text")
+                    .endObject()
+                    .startObject("alias")
+                        .field("type", "alias")
+                        .field("path", "text")
+                    .endObject()
+                .endObject()
+            .endObject()
+        .endObject();
+
+        assertAcked(prepareCreate("test").addMapping("_doc", mapping));
+        ensureGreen();
+
+        index("test", "_doc", "1", "text", "lucene");
+        index("test", "_doc", "2", "text", "lucene release");
+        refresh();
+
+        Item item = new Item("test", "_doc", "1");
+        QueryBuilder query = QueryBuilders.moreLikeThisQuery(new String[] {"alias"}, null, new Item[] {item})
+            .minTermFreq(1)
+            .minDocFreq(1);
+        SearchResponse response = client().prepareSearch().setQuery(query).get();
+        assertHitCount(response, 1L);
+    }
+
     public void testSimpleMoreLikeInclude() throws Exception {
         logger.info("Creating index test");
         assertAcked(prepareCreate("test").addMapping("type1",
diff --git a/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java
index 6e4a1b7d618ea..45910044d499f 100644
--- a/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java
+++ b/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java
@@ -24,6 +24,7 @@
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.SearchHit;
@@ -141,4 +142,89 @@ public void testExists() throws Exception {
             }
         }
     }
+
+    public void testFieldAlias() throws Exception {
+        XContentBuilder mapping = XContentFactory.jsonBuilder()
+            .startObject()
+                .startObject("type")
+                    .startObject("properties")
+                        .startObject("bar")
+                            .field("type", "long")
+                        .endObject()
+                        .startObject("foo")
+                            .field("type", "object")
+                            .startObject("properties")
+                                .startObject("bar")
+                                    .field("type", "double")
+                                .endObject()
+                            .endObject()
+                        .endObject()
+                        .startObject("foo-bar")
+                            .field("type", "alias")
+                            .field("path", "foo.bar")
+                        .endObject()
+                    .endObject()
+                .endObject()
+            .endObject();
+        assertAcked(prepareCreate("idx").addMapping("type", mapping));
+        ensureGreen("idx");
+
+        List<IndexRequestBuilder> indexRequests = new ArrayList<>();
+        indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap()));
+        indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap()));
+        indexRequests.add(client().prepareIndex("idx", "type").setSource("bar", 3));
+        indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", singletonMap("bar", 2.718)));
+        indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", singletonMap("bar", 6.283)));
+        indexRandom(true, false, indexRequests);
+
+        Map<String, Integer> expected = new LinkedHashMap<>();
+        expected.put("foo.bar", 2);
+        expected.put("foo-bar", 2);
+        expected.put("foo*", 2);
+        expected.put("*bar", 3);
+
+        for (Map.Entry<String, Integer> entry : expected.entrySet()) {
+            String fieldName = entry.getKey();
+            int expectedCount = entry.getValue();
+
+            SearchResponse response = client().prepareSearch("idx")
+                .setQuery(QueryBuilders.existsQuery(fieldName))
+                .get();
+            assertSearchResponse(response);
+            assertHitCount(response, expectedCount);
+        }
+    }
+
+    public void testFieldAliasWithNoDocValues() throws Exception {
+        XContentBuilder mapping = XContentFactory.jsonBuilder()
+
.startObject() + .startObject("type") + .startObject("properties") + .startObject("foo") + .field("type", "long") + .field("doc_values", false) + .endObject() + .startObject("foo-alias") + .field("type", "alias") + .field("path", "foo") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("idx").addMapping("type", mapping)); + ensureGreen("idx"); + + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", 3)); + indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", 43)); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(QueryBuilders.existsQuery("foo-alias")) + .get(); + assertSearchResponse(response); + assertHitCount(response, 2); + } } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java index 7145f9db2db23..5caab8c9dfec6 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java @@ -51,6 +51,7 @@ import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -376,6 +377,70 @@ public void testLimitOnExpandedFields() throws Exception { containsString("field expansion matches too many fields, limit: 1024, got: 1025")); } + public void testFieldAlias() throws Exception { + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("value").field("f3_alias")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 2); + assertHits(response.getHits(), "2", "3"); + } + + public void testFieldAliasWithEmbeddedFieldNames() throws Exception { + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("f3_alias:value AND f2:three")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 1); + assertHits(response.getHits(), "3"); + } + + public void testFieldAliasWithWildcardField() throws 
Exception { + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("value").field("f3_*")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 2); + assertHits(response.getHits(), "2", "3"); + } + + public void testFieldAliasOnDisallowedFieldType() throws Exception { + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRandom(true, false, indexRequests); + + // The wildcard field matches aliases for both a text and boolean field. + // By default, the boolean field should be ignored when building the query. + SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("text").field("f*_alias")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 1); + assertHits(response.getHits(), "1"); + } + private void assertHits(SearchHits hits, String... ids) { assertThat(hits.getTotalHits(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index be71867edd2a0..860c3e074f3df 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.query; +import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.English; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -26,13 +27,16 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.Operator; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; @@ -1822,4 +1826,114 @@ public void testRangeQueryRangeFields_24744() throws Exception { SearchResponse searchResponse = client().prepareSearch("test").setQuery(range).get(); assertHitCount(searchResponse, 1); } + + public void testRangeQueryTypeField_31476() throws Exception { + assertAcked(prepareCreate("test").addMapping("foo", "field", "type=keyword")); + + client().prepareIndex("test", "foo", "1").setSource("field", "value").get(); + refresh(); + + RangeQueryBuilder range = new RangeQueryBuilder("_type").from("ape").to("zebra"); + SearchResponse 
searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 1); + + range = new RangeQueryBuilder("_type").from("monkey").to("zebra"); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("ape").to("donkey"); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(false); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(true); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 1); + + range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(false); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(true); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 1); + } + + public void testNestedQueryWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("section") + .field("type", "nested") + .startObject("properties") + .startObject("distance") + .field("type", "long") + .endObject() + .startObject("route_length_miles") + .field("type", "alias") + .field("path", "section.distance") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("index").addMapping("_doc", mapping)); + + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .startObject("section") + .field("distance", 42) + .endObject() + .endObject(); + + index("index", "_doc", "1", source); + refresh(); + + QueryBuilder nestedQuery = QueryBuilders.nestedQuery("section", + QueryBuilders.termQuery("section.route_length_miles", 42), + ScoreMode.Max); + SearchResponse searchResponse = client().prepareSearch("index").setQuery(nestedQuery).get(); + assertHitCount(searchResponse, 1); + } + + public void testFieldAliasesForMetaFields() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("id-alias") + .field("type", "alias") + .field("path", "_id") + .endObject() + .startObject("routing-alias") + .field("type", "alias") + .field("path", "_routing") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mapping)); + + IndexRequestBuilder indexRequest = client().prepareIndex("test", "type") + .setId("1") + .setRouting("custom") + .setSource("field", "value"); + indexRandom(true, false, indexRequest); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(termQuery("routing-alias", "custom")) + .addDocValueField("id-alias") + .get(); + assertHitCount(searchResponse, 1L); + + SearchHit hit = searchResponse.getHits().getAt(0); + assertEquals(2, hit.getFields().size()); + assertTrue(hit.getFields().containsKey("id-alias")); + + DocumentField field = hit.getFields().get("id-alias"); + assertThat(field.getValue().toString(), equalTo("1")); + } } diff --git 
a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index d7db62b61438c..5176c327ac78e 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -58,6 +58,7 @@ import static java.util.Collections.singletonList; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; @@ -585,6 +586,67 @@ public void testLimitOnExpandedFields() throws Exception { containsString("field expansion matches too many fields, limit: 1024, got: 1025")); } + public void testFieldAlias() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + assertAcked(prepareCreate("test").setSource(indexBody, XContentType.JSON)); + ensureGreen("test"); + + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("test") + .setQuery(simpleQueryStringQuery("value").field("f3_alias")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 2); + assertHits(response.getHits(), "2", "3"); + } + + public void testFieldAliasWithWildcardField() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + assertAcked(prepareCreate("test").setSource(indexBody, XContentType.JSON)); + ensureGreen("test"); + + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("test") + .setQuery(simpleQueryStringQuery("value").field("f3_*")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 2); + assertHits(response.getHits(), "2", "3"); + } + + + public void testFieldAliasOnDisallowedFieldType() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + assertAcked(prepareCreate("test").setSource(indexBody, XContentType.JSON)); + ensureGreen("test"); + + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRandom(true, false, indexRequests); + + // The wildcard field matches aliases for both a text and boolean field. + // By default, the boolean field should be ignored when building the query. 
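+        // With all-query-index.json, "f*_alias" expands to f3_alias (path: f3, a text
+        // field) and f_bool_alias (path: f_bool, a boolean field); only the text alias
+        // can parse the query text, so only document 1 is expected to match.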
+ SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("text").field("f*_alias")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 1); + assertHits(response.getHits(), "1"); + } + private void assertHits(SearchHits hits, String... ids) { assertThat(hits.getTotalHits(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index 3af370326f5e4..ff0196aacdf16 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -40,6 +40,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.Matchers; @@ -1572,4 +1573,60 @@ public void testScriptFieldSort() throws Exception { } } } + + public void testFieldAlias() throws Exception { + // Create two indices and add the field 'route_length_miles' as an alias in + // one, and a concrete field in the other. + assertAcked(prepareCreate("old_index") + .addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance")); + assertAcked(prepareCreate("new_index") + .addMapping("_doc", "route_length_miles", "type=double")); + ensureGreen("old_index", "new_index"); + + List builders = new ArrayList<>(); + builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 50.5)); + builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); + indexRandom(true, true, builders); + + SearchResponse response = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(builders.size()) + .addSort(SortBuilders.fieldSort("route_length_miles")) + .execute().actionGet(); + SearchHits hits = response.getHits(); + + assertEquals(3, hits.getHits().length); + assertEquals(42.0, hits.getAt(0).getSortValues()[0]); + assertEquals(50.5, hits.getAt(1).getSortValues()[0]); + assertEquals(100.2, hits.getAt(2).getSortValues()[0]); + } + + public void testFieldAliasesWithMissingValues() throws Exception { + // Create two indices and add the field 'route_length_miles' as an alias in + // one, and a concrete field in the other. 
+ assertAcked(prepareCreate("old_index") + .addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance")); + assertAcked(prepareCreate("new_index") + .addMapping("_doc", "route_length_miles", "type=double")); + ensureGreen("old_index", "new_index"); + + List builders = new ArrayList<>(); + builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index", "_doc").setSource(Collections.emptyMap())); + builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); + indexRandom(true, true, builders); + + SearchResponse response = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(builders.size()) + .addSort(SortBuilders.fieldSort("route_length_miles").missing(120.3)) + .execute().actionGet(); + SearchHits hits = response.getHits(); + + assertEquals(3, hits.getHits().length); + assertEquals(42.0, hits.getAt(0).getSortValues()[0]); + assertEquals(100.2, hits.getAt(1).getSortValues()[0]); + assertEquals(120.3, hits.getAt(2).getSortValues()[0]); + } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 00a287f02528c..9d1f01fe3289b 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -160,7 +160,7 @@ public void testBuild() throws IOException { indexSettings); MapperService mapperService = mock(MapperService.class); ScriptService scriptService = mock(ScriptService.class); - MappedFieldType fieldType = mockFieldType(); + MappedFieldType fieldType = mockFieldType(suggestionBuilder.field()); boolean fieldTypeSearchAnalyzerSet = randomBoolean(); if (fieldTypeSearchAnalyzerSet) { NamedAnalyzer searchAnalyzer = new NamedAnalyzer("fieldSearchAnalyzer", AnalyzerScope.INDEX, new SimpleAnalyzer()); @@ -211,8 +211,10 @@ public void testBuild() throws IOException { */ protected abstract void assertSuggestionContext(SB builder, SuggestionContext context) throws IOException; - protected MappedFieldType mockFieldType() { - return mock(MappedFieldType.class); + protected MappedFieldType mockFieldType(String fieldName) { + MappedFieldType fieldType = mock(MappedFieldType.class); + when(fieldType.name()).thenReturn(fieldName); + return fieldType; } /** diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index a3fff7f9d5bcc..0c4c6e510ebf9 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.suggest; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.apache.lucene.analysis.TokenStreamToAutomaton; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; @@ -36,6 +35,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.plugins.Plugin;
@@ -1161,6 +1161,32 @@ public void testMultiDocSuggestions() throws Exception {
         assertSuggestions("foo", prefix, "suggester10", "suggester9", "suggester8", "suggester7", "suggester6");
     }
+
+    public void testSuggestWithFieldAlias() throws Exception {
+        XContentBuilder mapping = XContentFactory.jsonBuilder()
+            .startObject()
+                .startObject(TYPE)
+                    .startObject("properties")
+                        .startObject(FIELD)
+                            .field("type", "completion")
+                        .endObject()
+                        .startObject("alias")
+                            .field("type", "alias")
+                            .field("path", FIELD)
+                        .endObject()
+                    .endObject()
+                .endObject()
+            .endObject();
+        assertAcked(prepareCreate(INDEX).addMapping(TYPE, mapping));
+
+        List<IndexRequestBuilder> builders = new ArrayList<>();
+        builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "apple"));
+        builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "mango"));
+        builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "papaya"));
+        indexRandom(true, false, builders);
+
+        CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("alias").text("app");
+        assertSuggestions("suggestion", suggestionBuilder, "apple");
+    }

     public static boolean isReservedChar(char c) {
         switch (c) {
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
index 677cc4163ccf7..aaeaadd4c9f83 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
@@ -979,6 +979,35 @@ public void testSuggestWithManyCandidates() throws InterruptedException, ExecutionException {
 //        assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging
     }
+
+    public void testSuggestWithFieldAlias() throws Exception {
+        XContentBuilder mapping = XContentFactory.jsonBuilder()
+            .startObject()
+                .startObject("type")
+                    .startObject("properties")
+                        .startObject("text")
+                            .field("type", "keyword")
+                        .endObject()
+                        .startObject("alias")
+                            .field("type", "alias")
+                            .field("path", "text")
+                        .endObject()
+                    .endObject()
+                .endObject()
+            .endObject();
+        assertAcked(prepareCreate("test").addMapping("type", mapping));
+
+        List<IndexRequestBuilder> builders = new ArrayList<>();
+        builders.add(client().prepareIndex("test", "type").setSource("text", "apple"));
+        builders.add(client().prepareIndex("test", "type").setSource("text", "mango"));
+        builders.add(client().prepareIndex("test", "type").setSource("text", "papaya"));
+        indexRandom(true, false, builders);
+
+        TermSuggestionBuilder termSuggest = termSuggestion("alias").text("appple");
+
+        Suggest searchSuggest = searchSuggest("suggestion", termSuggest);
+        assertSuggestion(searchSuggest, 0, "suggestion", "apple");
+    }
+
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(DummyTemplatePlugin.class);
diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
index 3a7451e78fb4f..f9b252f0e136b 100644
--- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java
@@ -31,6 +31,7 @@
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import
org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; @@ -38,10 +39,10 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.CompletionFieldMapper.CompletionFieldType; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; @@ -74,8 +75,7 @@ public void testIndexingWithNoContexts() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -95,7 +95,7 @@ public void testIndexingWithNoContexts() throws Exception { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 7); } @@ -113,8 +113,7 @@ public void testIndexingWithSimpleContexts() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -129,7 +128,7 @@ public void testIndexingWithSimpleContexts() throws Exception { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -147,8 +146,7 @@ public void testIndexingWithSimpleNumberContexts() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -163,7 +161,7 @@ public void 
testIndexingWithSimpleNumberContexts() throws Exception { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -181,8 +179,7 @@ public void testIndexingWithSimpleBooleanContexts() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -197,7 +194,7 @@ public void testIndexingWithSimpleBooleanContexts() throws Exception { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -247,8 +244,7 @@ public void testIndexingWithContextList() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -261,7 +257,7 @@ public void testIndexingWithContextList() throws Exception { .endObject() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -279,8 +275,7 @@ public void testIndexingWithMixedTypeContextList() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -293,7 +288,7 @@ public void testIndexingWithMixedTypeContextList() throws Exception { .endObject() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -345,8 +340,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - 
MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); XContentBuilder builder = jsonBuilder() .startObject() .startArray("completion") @@ -362,7 +356,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject(); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -698,7 +692,7 @@ public void testQueryContextParsingMixedHavingNULL() throws Exception { } public void testUnknownQueryContextParsing() throws Exception { - String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") + XContentBuilder mapping = jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -712,11 +706,10 @@ public void testUnknownQueryContextParsing() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - CompletionFieldType completionFieldType = (CompletionFieldType) fieldMapper.fieldType(); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + CompletionFieldType completionFieldType = (CompletionFieldType) mapperService.fullName("completion"); Exception e = expectThrows(IllegalArgumentException.class, () -> completionFieldType.getContextMappings().get("brand")); assertEquals("Unknown context name [brand], must be one of [ctx, type]", e.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 37fdb7e0aa08b..88e6ce6466622 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -164,8 +164,9 @@ protected void mutateSpecificParameters(CompletionSuggestionBuilder builder) thr } @Override - protected MappedFieldType mockFieldType() { + protected MappedFieldType mockFieldType(String fieldName) { CompletionFieldType completionFieldType = new CompletionFieldType(); + completionFieldType.setName(fieldName); completionFieldType.setContextMappings(new ContextMappings(contextMappings)); return completionFieldType; } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index 2d179f3dbe6c3..56ff157ec718d 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -20,16 +20,14 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.search.suggest.completion.context.ContextBuilder; @@ -50,7 +48,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { public void testIndexingWithNoContexts() throws Exception { - String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") + XContentBuilder mapping = jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -60,13 +58,12 @@ public void testIndexingWithNoContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference - .bytes(jsonBuilder() + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MappedFieldType completionFieldType = mapperService.fullName("completion"); + ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + BytesReference.bytes(jsonBuilder() .startObject() .startArray("completion") .startObject() @@ -89,7 +86,7 @@ public void testIndexingWithNoContexts() throws Exception { } public void testIndexingWithSimpleContexts() throws Exception { - String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") + XContentBuilder mapping = jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -100,13 +97,12 @@ public void testIndexingWithSimpleContexts() throws Exception { .endArray() .endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference - .bytes(jsonBuilder() + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MappedFieldType completionFieldType = mapperService.fullName("completion"); + ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + BytesReference.bytes(jsonBuilder() .startObject() 
.startArray("completion") .startObject() @@ -127,7 +123,7 @@ public void testIndexingWithSimpleContexts() throws Exception { } public void testIndexingWithContextList() throws Exception { - String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") + XContentBuilder mapping = jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -137,13 +133,12 @@ public void testIndexingWithContextList() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference - .bytes(jsonBuilder() + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MappedFieldType completionFieldType = mapperService.fullName("completion"); + ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + BytesReference.bytes(jsonBuilder() .startObject() .startObject("completion") .array("input", "suggestion5", "suggestion6", "suggestion7") @@ -168,7 +163,7 @@ public void testIndexingWithContextList() throws Exception { } public void testIndexingWithMultipleContexts() throws Exception { - String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") + XContentBuilder mapping = jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -182,11 +177,10 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MappedFieldType completionFieldType = mapperService.fullName("completion"); XContentBuilder builder = jsonBuilder() .startObject() .startArray("completion") @@ -200,8 +194,8 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endArray() .endObject(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), - XContentType.JSON)); + ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + BytesReference.bytes(builder), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } diff --git a/server/src/test/java/org/elasticsearch/update/UpdateIT.java b/server/src/test/java/org/elasticsearch/update/UpdateIT.java index c86dfcb98f701..e4ea078b8f716 100644 --- a/server/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/server/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -140,8 
+140,7 @@ protected Collection> nodePlugins() { private void createTestIndex() throws Exception { logger.info("--> creating index test"); - - assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); + assertAcked(prepareCreate("test").addAlias(new Alias("alias").writeIndex(randomFrom(true, null)))); } public void testUpsert() throws Exception { diff --git a/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json b/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json index 72c9b54f6e3da..abdc11928229f 100644 --- a/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json +++ b/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json @@ -11,6 +11,10 @@ "f1": {"type": "text"}, "f2": {"type": "keyword"}, "f3": {"type": "text"}, + "f3_alias": { + "type": "alias", + "path": "f3" + }, "f4": { "type": "text", "index_options": "docs" @@ -42,6 +46,10 @@ "format": "yyyy/MM/dd||epoch_millis" }, "f_bool": {"type": "boolean"}, + "f_bool_alias": { + "type": "alias", + "path": "f_bool" + }, "f_byte": {"type": "byte"}, "f_short": {"type": "short"}, "f_int": {"type": "integer"}, diff --git a/settings.gradle b/settings.gradle index 5904cc4daf4d5..bdd866e622bcd 100644 --- a/settings.gradle +++ b/settings.gradle @@ -131,3 +131,5 @@ if (extraProjects.exists()) { // enable in preparation for Gradle 5.0 enableFeaturePreview('STABLE_PUBLISHING') + +project(":libs:cli").name = 'elasticsearch-cli' diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index b751652c2ab3f..9a8480fcb635d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -596,14 +596,12 @@ protected Engine.Index indexForDoc(ParsedDocument doc) { protected Engine.Index replicaIndexForDoc(ParsedDocument doc, long version, long seqNo, boolean isRetry) { - return new Engine.Index(newUid(doc), doc, seqNo, primaryTerm.get(), version, VersionType.EXTERNAL, - Engine.Operation.Origin.REPLICA, System.nanoTime(), - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, isRetry); + return new Engine.Index(newUid(doc), doc, seqNo, primaryTerm.get(), version, null, Engine.Operation.Origin.REPLICA, + System.nanoTime(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, isRetry); } protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) { - return new Engine.Delete("test", id, newUid(id), seqNo, primaryTerm.get(), version, VersionType.EXTERNAL, - Engine.Operation.Origin.REPLICA, startTime); + return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, null, Engine.Operation.Origin.REPLICA, startTime); } protected static void assertVisibleCount(InternalEngine engine, int numDocs) throws IOException { assertVisibleCount(engine, numDocs, true); @@ -652,7 +650,7 @@ public static List generateSingleDocHistory(boolean forReplica forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, - forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType, + forReplica ? null : versionType, forReplica ? 
REPLICA : PRIMARY, System.currentTimeMillis(), -1, false ); @@ -661,7 +659,7 @@ public static List generateSingleDocHistory(boolean forReplica forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, - forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType, + forReplica ? null : versionType, forReplica ? REPLICA : PRIMARY, System.currentTimeMillis()); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java index 53fe89ac17ea5..9999a3b3748f1 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java @@ -124,14 +124,12 @@ private Engine.Operation convertToEngineOp(Translog.Operation operation, Engine. source(indexName, index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source())) .routing(index.routing()), index.seqNo(), index.primaryTerm(), - index.version(), index.versionType().versionTypeForReplicationAndRecovery(), origin, - index.getAutoGeneratedIdTimestamp(), true); + index.version(), null, origin, index.getAutoGeneratedIdTimestamp(), true); return engineIndex; case DELETE: final Translog.Delete delete = (Translog.Delete) operation; final Engine.Delete engineDelete = new Engine.Delete(delete.type(), delete.id(), delete.uid(), delete.seqNo(), - delete.primaryTerm(), delete.version(), delete.versionType().versionTypeForReplicationAndRecovery(), - origin, System.nanoTime()); + delete.primaryTerm(), delete.version(), null, origin, System.nanoTime()); return engineDelete; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) operation; diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 7e0493961caa1..bd2e2fe8f1999 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -290,7 +290,7 @@ public synchronized IndexShard addReplicaWithExistingPath(final ShardPath shardP RecoverySource.PeerRecoverySource.INSTANCE); final IndexShard newReplica = - newShard(shardRouting, shardPath, indexMetaData, null, getEngineFactory(shardRouting), () -> {}, EMPTY_EVENT_LISTENER); + newShard(shardRouting, shardPath, indexMetaData, null, getEngineFactory(shardRouting), () -> {}, EMPTY_EVENT_LISTENER); replicas.add(newReplica); updateAllocationIDsOnPrimary(); return newReplica; @@ -366,8 +366,11 @@ public void recoverReplica( IndexShard replica, BiFunction targetSupplier, boolean markAsRecovering) throws IOException { - ESIndexLevelReplicationTestCase.this.recoverReplica(replica, primary, targetSupplier, markAsRecovering, activeIds(), - routingTable(Function.identity())); + final IndexShardRoutingTable routingTable = routingTable(Function.identity()); + final Set inSyncIds = activeIds(); + ESIndexLevelReplicationTestCase.this.recoverUnstartedReplica(replica, primary, targetSupplier, markAsRecovering, inSyncIds, + routingTable); + ESIndexLevelReplicationTestCase.this.startReplicaAfterRecovery(replica, primary, inSyncIds, routingTable); } public synchronized 
DiscoveryNode getPrimaryNode() { @@ -502,7 +505,7 @@ public ShardRouting routingEntry() { @Override public void failShard(String message, Exception exception) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("failing a primary isn't supported. failure: " + message, exception); } @Override @@ -575,13 +578,13 @@ public void onFailure(Exception e) { public void failShardIfNeeded(ShardRouting replica, String message, Exception exception, Runnable onSuccess, Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("failing shard " + replica + " isn't supported. failure: " + message, exception); } @Override public void markShardCopyAsStaleIfNeeded(ShardId shardId, String allocationId, Runnable onSuccess, Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("can't mark " + shardId + ", aid [" + allocationId + "] as stale"); } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 7f938d5cbd741..794a90624bf95 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -85,8 +85,10 @@ import java.util.HashSet; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; +import java.util.function.Consumer; import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.hamcrest.Matchers.contains; @@ -101,6 +103,14 @@ public abstract class IndexShardTestCase extends ESTestCase { public static final IndexEventListener EMPTY_EVENT_LISTENER = new IndexEventListener() {}; + private static final AtomicBoolean failOnShardFailures = new AtomicBoolean(true); + + private static final Consumer DEFAULT_SHARD_FAILURE_HANDLER = failure -> { + if (failOnShardFailures.get()) { + throw new AssertionError(failure.reason, failure.cause); + } + }; + protected static final PeerRecoveryTargetService.RecoveryListener recoveryListener = new PeerRecoveryTargetService.RecoveryListener() { @Override public void onRecoveryDone(RecoveryState state) { @@ -121,6 +131,7 @@ public void setUp() throws Exception { super.setUp(); threadPool = new TestThreadPool(getClass().getName(), threadPoolSettings()); primaryTerm = randomIntBetween(1, 100); // use random but fixed term for creating shards + failOnShardFailures.set(true); } @Override @@ -132,6 +143,15 @@ public void tearDown() throws Exception { } } + /** + * By default, tests will fail if any shard created by this class fails. Tests that cause failures by design + * can call this method to ignore those failures. + */ + protected void allowShardFailures() { failOnShardFailures.set(false); } + public Settings threadPoolSettings() { return Settings.EMPTY; } @@ -290,7 +310,7 @@ protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData, /** * Creates a new initializing shard.
- * @param routing shard routing to use + * @param routing shard routing to use * @param shardPath path to use for shard data * @param indexMetaData indexMetaData for the shard, including any mapping * @param indexSearcherWrapper an optional wrapper to be used during searchers @@ -322,6 +342,7 @@ protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMe engineFactory, indexEventListener, indexSearcherWrapper, threadPool, BigArrays.NON_RECYCLING_INSTANCE, warmer, Collections.emptyList(), Arrays.asList(listeners), globalCheckpointSyncer, breakerService); + indexShard.addShardFailureCallback(DEFAULT_SHARD_FAILURE_HANDLER); success = true; } finally { if (success == false) { @@ -398,7 +419,7 @@ protected IndexShard newStartedShard( if (primary) { recoverShardFromStore(shard); } else { - recoveryEmptyReplica(shard); + recoveryEmptyReplica(shard, true); } return shard; } @@ -440,11 +461,11 @@ public static void updateRoutingEntry(IndexShard shard, ShardRouting shardRoutin inSyncIds, newRoutingTable, Collections.emptySet()); } - protected void recoveryEmptyReplica(IndexShard replica) throws IOException { + protected void recoveryEmptyReplica(IndexShard replica, boolean startReplica) throws IOException { IndexShard primary = null; try { primary = newStartedShard(true); - recoverReplica(replica, primary); + recoverReplica(replica, primary, startReplica); } finally { closeShards(primary); } @@ -456,42 +477,48 @@ protected DiscoveryNode getFakeDiscoNode(String id) { } /** recovers a replica from the given primary **/ - protected void recoverReplica(IndexShard replica, IndexShard primary) throws IOException { + protected void recoverReplica(IndexShard replica, IndexShard primary, boolean startReplica) throws IOException { recoverReplica(replica, primary, (r, sourceNode) -> new RecoveryTarget(r, sourceNode, recoveryListener, version -> { }), - true); + true, true); } /** recovers a replica from the given primary **/ protected void recoverReplica(final IndexShard replica, final IndexShard primary, final BiFunction targetSupplier, - final boolean markAsRecovering) throws IOException { + final boolean markAsRecovering, final boolean markAsStarted) throws IOException { IndexShardRoutingTable.Builder newRoutingTable = new IndexShardRoutingTable.Builder(replica.shardId()); newRoutingTable.addShard(primary.routingEntry()); if (replica.routingEntry().isRelocationTarget() == false) { newRoutingTable.addShard(replica.routingEntry()); } - recoverReplica(replica, primary, targetSupplier, markAsRecovering, - Collections.singleton(primary.routingEntry().allocationId().getId()), - newRoutingTable.build()); + final Set inSyncIds = Collections.singleton(primary.routingEntry().allocationId().getId()); + final IndexShardRoutingTable routingTable = newRoutingTable.build(); + recoverUnstartedReplica(replica, primary, targetSupplier, markAsRecovering, inSyncIds, routingTable); + if (markAsStarted) { + startReplicaAfterRecovery(replica, primary, inSyncIds, routingTable); + } } /** * Recovers a replica from the given primary, allowing the user to supply a custom recovery target. A typical usage of a custom recovery * target is to assert things in the various stages of recovery. + * + * Note: this method keeps the shard in {@link IndexShardState#POST_RECOVERY} and doesn't start it.
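+ * A typical usage sketch, using only helpers added in this change: call this method, make assertions against the supplied {@link RecoveryTarget} while the shard is still in post-recovery, and then call {@code startReplicaAfterRecovery(replica, primary, inSyncIds, routingTable)} to start it.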
+ * * @param replica the recovery target shard * @param primary the recovery source shard * @param targetSupplier supplies an instance of {@link RecoveryTarget} * @param markAsRecovering set to {@code false} if the replica is marked as recovering */ - protected final void recoverReplica(final IndexShard replica, - final IndexShard primary, - final BiFunction targetSupplier, - final boolean markAsRecovering, - final Set inSyncIds, - final IndexShardRoutingTable routingTable) throws IOException { + protected final void recoverUnstartedReplica(final IndexShard replica, + final IndexShard primary, + final BiFunction targetSupplier, + final boolean markAsRecovering, + final Set inSyncIds, + final IndexShardRoutingTable routingTable) throws IOException { final DiscoveryNode pNode = getFakeDiscoNode(primary.routingEntry().currentNodeId()); final DiscoveryNode rNode = getFakeDiscoNode(replica.routingEntry().currentNodeId()); if (markAsRecovering) { @@ -519,11 +546,15 @@ protected final void recoverReplica(final IndexShard replica, request, (int) ByteSizeUnit.MB.toBytes(1), Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), pNode.getName()).build()); - final ShardRouting initializingReplicaRouting = replica.routingEntry(); primary.updateShardState(primary.routingEntry(), primary.getPrimaryTerm(), null, currentClusterStateVersion.incrementAndGet(), inSyncIds, routingTable, Collections.emptySet()); recovery.recoverToTarget(); recoveryTarget.markAsDone(); + } + + protected void startReplicaAfterRecovery(IndexShard replica, IndexShard primary, Set inSyncIds, + IndexShardRoutingTable routingTable) throws IOException { + ShardRouting initializingReplicaRouting = replica.routingEntry(); IndexShardRoutingTable newRoutingTable = initializingReplicaRouting.isRelocationTarget() ? 
new IndexShardRoutingTable.Builder(routingTable) @@ -543,6 +574,31 @@ protected final void recoverReplica( currentClusterStateVersion.get(), inSyncIdsWithReplica, newRoutingTable, Collections.emptySet()); } + + /** + * Promotes a replica to primary, incrementing its term and starting it if needed. + */ + protected void promoteReplica(IndexShard replica, Set inSyncIds, IndexShardRoutingTable routingTable) throws IOException { + assertThat(inSyncIds, contains(replica.routingEntry().allocationId().getId())); + final ShardRouting routingEntry = newShardRouting( + replica.routingEntry().shardId(), + replica.routingEntry().currentNodeId(), + null, + true, + ShardRoutingState.STARTED, + replica.routingEntry().allocationId()); + + final IndexShardRoutingTable newRoutingTable = new IndexShardRoutingTable.Builder(routingTable) + .removeShard(replica.routingEntry()) + .addShard(routingEntry) + .build(); + replica.updateShardState(routingEntry, replica.getPrimaryTerm() + 1, + (is, listener) -> + listener.onResponse(new PrimaryReplicaSyncer.ResyncTask(1, "type", "action", "desc", null, Collections.emptyMap())), + currentClusterStateVersion.incrementAndGet(), + inSyncIds, newRoutingTable, Collections.emptySet()); + } + private Store.MetadataSnapshot getMetadataSnapshotOrEmpty(IndexShard replica) throws IOException { Store.MetadataSnapshot result; try { @@ -605,7 +661,7 @@ protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, shard.getLocalCheckpoint()); } else { result = shard.applyIndexOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0, - VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { throw new TransportReplicationAction.RetryOnReplicaException(shard.shardId, "Mappings are not available on the replica yet, triggered update: " + result.getRequiredMappingUpdate()); @@ -625,7 +681,7 @@ protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id result = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, type, id, VersionType.INTERNAL); shard.updateLocalCheckpointForShard(shard.routingEntry().allocationId().getId(), shard.getEngine().getLocalCheckpoint()); } else { - result = shard.applyDeleteOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0L, type, id, VersionType.EXTERNAL); + result = shard.applyDeleteOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0L, type, id); } return result; } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index e84f2a99a115d..3002711bdbd8e 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -62,6 +62,7 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; @@ -69,7 +70,6 @@ import
org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; import org.junit.After; @@ -79,7 +79,12 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS; import static org.mockito.Matchers.anyObject; @@ -144,16 +149,50 @@ protected AggregatorFactory createAggregatorFactory(Query query, SearchLookup searchLookup = new SearchLookup(mapperService, ifds::getForField, new String[]{TYPE_NAME}); when(searchContext.lookup()).thenReturn(searchLookup); - QueryShardContext queryShardContext = queryShardContextMock(mapperService, fieldTypes, circuitBreakerService); + QueryShardContext queryShardContext = queryShardContextMock(mapperService); when(queryShardContext.getIndexSettings()).thenReturn(indexSettings); when(searchContext.getQueryShardContext()).thenReturn(queryShardContext); - for (MappedFieldType fieldType : fieldTypes) { - when(searchContext.smartNameFieldType(fieldType.name())).thenReturn(fieldType); - } + + Map fieldNameToType = new HashMap<>(); + fieldNameToType.putAll(Arrays.stream(fieldTypes) + .collect(Collectors.toMap(MappedFieldType::name, Function.identity()))); + fieldNameToType.putAll(getFieldAliases(fieldTypes)); + + registerFieldTypes(queryShardContext, searchContext, mapperService, + circuitBreakerService, fieldNameToType); return aggregationBuilder.build(searchContext, null); } + /** + * Allows subclasses to provide alternate names for the provided field type, which + * can be useful when testing aggregations on field aliases. + */ + protected Map getFieldAliases(MappedFieldType... fieldTypes) { + return Collections.emptyMap(); + } + + private void registerFieldTypes(QueryShardContext queryShardContext, + SearchContext searchContext, + MapperService mapperService, + CircuitBreakerService circuitBreakerService, + Map fieldNameToType) { + for (Map.Entry entry : fieldNameToType.entrySet()) { + String fieldName = entry.getKey(); + MappedFieldType fieldType = entry.getValue(); + + when(queryShardContext.fieldMapper(fieldName)).thenReturn(fieldType); + when(searchContext.smartNameFieldType(fieldName)).thenReturn(fieldType); + } + + for (MappedFieldType fieldType : new HashSet<>(fieldNameToType.values())) { + when(queryShardContext.getForField(fieldType)).then(invocation -> + fieldType.fielddataBuilder(mapperService.getIndexSettings().getIndex().getName()) + .build(mapperService.getIndexSettings(), fieldType, + new IndexFieldDataCache.None(), circuitBreakerService, mapperService)); + } + } + protected A createAggregator(AggregationBuilder aggregationBuilder, IndexSearcher indexSearcher, MappedFieldType... 
fieldTypes) throws IOException { @@ -257,16 +296,9 @@ protected MapperService mapperServiceMock() { /** * sub-tests that need a more complex mock can overwrite this */ - protected QueryShardContext queryShardContextMock(MapperService mapperService, MappedFieldType[] fieldTypes, - CircuitBreakerService circuitBreakerService) { + protected QueryShardContext queryShardContextMock(MapperService mapperService) { QueryShardContext queryShardContext = mock(QueryShardContext.class); when(queryShardContext.getMapperService()).thenReturn(mapperService); - for (MappedFieldType fieldType : fieldTypes) { - when(queryShardContext.fieldMapper(fieldType.name())).thenReturn(fieldType); - when(queryShardContext.getForField(fieldType)).then(invocation -> fieldType.fielddataBuilder(mapperService.getIndexSettings() - .getIndex().getName()) - .build(mapperService.getIndexSettings(), fieldType, new IndexFieldDataCache.None(), circuitBreakerService, mapperService)); - } NestedScope nestedScope = new NestedScope(); when(queryShardContext.isFilter()).thenCallRealMethod(); Mockito.doCallRealMethod().when(queryShardContext).setIsFilter(Matchers.anyBoolean()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index e50e93ec672b8..a891f30b93d21 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -83,7 +83,9 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.stream.Stream; @@ -94,21 +96,35 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { public static final String STRING_FIELD_NAME = "mapped_string"; + public static final String STRING_ALIAS_FIELD_NAME = "mapped_string_alias"; protected static final String STRING_FIELD_NAME_2 = "mapped_string_2"; protected static final String INT_FIELD_NAME = "mapped_int"; + protected static final String INT_ALIAS_FIELD_NAME = "mapped_int_field_alias"; protected static final String INT_RANGE_FIELD_NAME = "mapped_int_range"; protected static final String DOUBLE_FIELD_NAME = "mapped_double"; protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean"; protected static final String DATE_FIELD_NAME = "mapped_date"; + protected static final String DATE_ALIAS_FIELD_NAME = "mapped_date_alias"; protected static final String DATE_RANGE_FIELD_NAME = "mapped_date_range"; protected static final String OBJECT_FIELD_NAME = "mapped_object"; protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; + protected static final String GEO_POINT_ALIAS_FIELD_NAME = "mapped_geo_point_alias"; protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; - protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME, - DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, - GEO_SHAPE_FIELD_NAME}; - protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME, - DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, GEO_POINT_FIELD_NAME, }; + protected static final String[] 
MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, + INT_FIELD_NAME, INT_RANGE_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, + DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, + GEO_SHAPE_FIELD_NAME}; + protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, + INT_FIELD_NAME, INT_RANGE_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, + DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME}; + + private static final Map ALIAS_TO_CONCRETE_FIELD_NAME = new HashMap<>(); + static { + ALIAS_TO_CONCRETE_FIELD_NAME.put(STRING_ALIAS_FIELD_NAME, STRING_FIELD_NAME); + ALIAS_TO_CONCRETE_FIELD_NAME.put(INT_ALIAS_FIELD_NAME, INT_FIELD_NAME); + ALIAS_TO_CONCRETE_FIELD_NAME.put(DATE_ALIAS_FIELD_NAME, DATE_FIELD_NAME); + ALIAS_TO_CONCRETE_FIELD_NAME.put(GEO_POINT_ALIAS_FIELD_NAME, GEO_POINT_FIELD_NAME); + } protected static Version indexVersionCreated; @@ -200,6 +216,13 @@ protected Settings indexSettings() { .build(); } + protected static String expectedFieldName(String builderFieldName) { + if (currentTypes.length == 0) { + return builderFieldName; + } + return ALIAS_TO_CONCRETE_FIELD_NAME.getOrDefault(builderFieldName, builderFieldName); + } + @AfterClass public static void afterClass() throws Exception { IOUtils.close(serviceHolder); @@ -356,19 +379,24 @@ public void onRemoval(ShardId shardId, Accountable accountable) { } }); + for (String type : currentTypes) { mapperService.merge(type, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(type, - STRING_FIELD_NAME, "type=text", - STRING_FIELD_NAME_2, "type=keyword", - INT_FIELD_NAME, "type=integer", - INT_RANGE_FIELD_NAME, "type=integer_range", - DOUBLE_FIELD_NAME, "type=double", - BOOLEAN_FIELD_NAME, "type=boolean", - DATE_FIELD_NAME, "type=date", - DATE_RANGE_FIELD_NAME, "type=date_range", - OBJECT_FIELD_NAME, "type=object", - GEO_POINT_FIELD_NAME, "type=geo_point", - GEO_SHAPE_FIELD_NAME, "type=geo_shape" + STRING_FIELD_NAME, "type=text", + STRING_FIELD_NAME_2, "type=keyword", + STRING_ALIAS_FIELD_NAME, "type=alias,path=" + STRING_FIELD_NAME, + INT_FIELD_NAME, "type=integer", + INT_ALIAS_FIELD_NAME, "type=alias,path=" + INT_FIELD_NAME, + INT_RANGE_FIELD_NAME, "type=integer_range", + DOUBLE_FIELD_NAME, "type=double", + BOOLEAN_FIELD_NAME, "type=boolean", + DATE_FIELD_NAME, "type=date", + DATE_ALIAS_FIELD_NAME, "type=alias,path=" + DATE_FIELD_NAME, + DATE_RANGE_FIELD_NAME, "type=date_range", + OBJECT_FIELD_NAME, "type=object", + GEO_POINT_FIELD_NAME, "type=geo_point", + GEO_POINT_ALIAS_FIELD_NAME, "type=alias,path=" + GEO_POINT_FIELD_NAME, + GEO_SHAPE_FIELD_NAME, "type=geo_shape" ))), MapperService.MergeReason.MAPPING_UPDATE); // also add mappings for two inner field in the object field mapperService.merge(type, new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\"," diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index a2acc5371a19e..c1efd9d8e6a4d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -522,7 +522,7 @@ private void assertLuceneQuery(QB queryBuilder, Query query, SearchContext conte */ protected abstract void doAssertLuceneQuery(QB 
queryBuilder, Query query, SearchContext context) throws IOException; - protected static void assertTermOrBoostQuery(Query query, String field, String value, float fieldBoost) { + protected void assertTermOrBoostQuery(Query query, String field, String value, float fieldBoost) { if (fieldBoost != AbstractQueryBuilder.DEFAULT_BOOST) { assertThat(query, instanceOf(BoostQuery.class)); BoostQuery boostQuery = (BoostQuery) query; @@ -532,10 +532,12 @@ protected static void assertTermOrBoostQuery(Query query, String field, String v assertTermQuery(query, field, value); } - protected static void assertTermQuery(Query query, String field, String value) { + protected void assertTermQuery(Query query, String field, String value) { assertThat(query, instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) query; - assertThat(termQuery.getTerm().field(), equalTo(field)); + + String expectedFieldName = expectedFieldName(field); + assertThat(termQuery.getTerm().field(), equalTo(expectedFieldName)); assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(value.toLowerCase(Locale.ROOT))); } @@ -625,6 +627,7 @@ protected static Object getRandomValueForFieldName(String fieldName) { Object value; switch (fieldName) { case STRING_FIELD_NAME: + case STRING_ALIAS_FIELD_NAME: if (rarely()) { // unicode in 10% cases JsonStringEncoder encoder = JsonStringEncoder.getInstance(); @@ -783,4 +786,8 @@ protected QueryBuilder rewriteAndFetch(QueryBuilder builder, QueryRewriteContext Rewriteable.rewriteAndFetch(builder, context, future); return future.actionGet(); } + + public boolean isTextField(String fieldName) { + return fieldName.equals(STRING_FIELD_NAME) || fieldName.equals(STRING_ALIAS_FIELD_NAME); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 9cdfc6776f883..4c00527a93267 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -125,6 +125,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.time.ZoneId; +import java.security.Security; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -132,6 +133,7 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Random; @@ -1363,4 +1365,8 @@ public TestAnalysis(IndexAnalyzers indexAnalyzers, } } + public static boolean inFipsJvm() { + return Security.getProviders()[0].getName().toLowerCase(Locale.ROOT).contains("fips"); + } + } diff --git a/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc b/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc index d93d4e523d9f2..49913382482bd 100644 --- a/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc +++ b/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc @@ -1,12 +1,13 @@ [role="xpack"] [[configuring-tls-docker]] -=== Encrypting Communications in an {es} Docker Image +=== Encrypting Communications in an {es} Docker Container Starting with version 6.0.0, {security} (Gold, Platinum or Enterprise subscriptions) https://www.elastic.co/guide/en/elasticsearch/reference/6.0/breaking-6.0.0-xes.html[requires SSL/TLS] encryption for the transport networking layer. 
This section demonstrates an easy path to get started with SSL/TLS for both -HTTPS and transport using the `elasticsearch-platinum` docker image. +HTTPS and transport using the {es} Docker image. The example uses +Docker Compose to manage the containers. For further details, please refer to {xpack-ref}/encrypting-communications.html[Encrypting Communications] and @@ -17,7 +18,7 @@ https://www.elastic.co/subscriptions[available subscriptions]. <>. -Inside a new, empty, directory create the following **four files**: +Inside a new, empty directory, create the following four files: `instances.yml`: ["source","yaml"] @@ -29,6 +30,7 @@ instances: - localhost ip: - 127.0.0.1 + - name: es02 dns: - es02 @@ -60,6 +62,7 @@ ifeval::["{release-state}"!="unreleased"] ["source","yaml",subs="attributes"] ---- version: '2.2' + services: create_certs: container_name: create_certs @@ -96,6 +99,7 @@ ifeval::["{release-state}"!="unreleased"] ["source","yaml",subs="attributes"] ---- version: '2.2' + services: es01: container_name: es01 @@ -105,9 +109,11 @@ services: - discovery.zen.minimum_master_nodes=2 - ELASTIC_PASSWORD=$ELASTIC_PASSWORD <1> - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + - xpack.license.self_generated.type=trial <2> + - xpack.security.enabled=true - xpack.security.http.ssl.enabled=true - xpack.security.transport.ssl.enabled=true - - xpack.security.transport.ssl.verification_mode=certificate <2> + - xpack.security.transport.ssl.verification_mode=certificate <3> - xpack.ssl.certificate_authorities=$CERTS_DIR/ca/ca.crt - xpack.ssl.certificate=$CERTS_DIR/es01/es01.crt - xpack.ssl.key=$CERTS_DIR/es01/es01.key @@ -119,15 +125,18 @@ services: interval: 30s timeout: 10s retries: 5 + es02: container_name: es02 - image: docker.elastic.co/elasticsearch/elasticsearch-platinum:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:{version} environment: - node.name=es02 - discovery.zen.minimum_master_nodes=2 - ELASTIC_PASSWORD=$ELASTIC_PASSWORD - discovery.zen.ping.unicast.hosts=es01 - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + - xpack.license.self_generated.type=trial + - xpack.security.enabled=true - xpack.security.http.ssl.enabled=true - xpack.security.transport.ssl.enabled=true - xpack.security.transport.ssl.verification_mode=certificate @@ -135,16 +144,20 @@ services: - xpack.ssl.certificate=$CERTS_DIR/es02/es02.crt - xpack.ssl.key=$CERTS_DIR/es02/es02.key volumes: ['esdata_02:/usr/share/elasticsearch/data', './certs:$CERTS_DIR'] + wait_until_ready: - image: docker.elastic.co/elasticsearch/elasticsearch-platinum:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:{version} command: /usr/bin/true depends_on: {"es01": {"condition": "service_healthy"}} + volumes: {"esdata_01": {"driver": "local"}, "esdata_02": {"driver": "local"}} ---- -<1> Bootstrap `elastic` with the password defined in `.env`. See +<1> Bootstrap `elastic` with the password defined in `.env`. See {stack-ov}/built-in-users.html#bootstrap-elastic-passwords[the Elastic Bootstrap Password]. -<2> Disable verification of authenticity for inter-node communication. Allows +<2> Automatically generate and apply a trial subscription, in order to enable +{security}. +<3> Disable verification of authenticity for inter-node communication. Allows creating self-signed certificates without having to pin specific internal IP addresses. 
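Once both nodes are up, a quick way to confirm that the HTTP layer is really using TLS is to query the cluster through the generated CA. The commands below are a sketch, not part of the files above: they assume that `es01` publishes port 9200 on the host (the port mapping is not shown in this excerpt) and that the certificates were written to `./certs`.

["source","sh"]
----
# Plain HTTP is refused once TLS is enabled on the HTTP layer:
curl http://localhost:9200

# HTTPS succeeds when the generated CA is trusted and credentials are supplied:
curl --cacert certs/ca/ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200
----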
endif::[] diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 20ae41f10dc68..ca529496bf168 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -151,7 +151,6 @@ integTestCluster { setting 'xpack.license.self_generated.type', 'trial' keystoreSetting 'bootstrap.password', 'x-pack-test-password' keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass' - keystoreSetting 'xpack.security.ingest.hash.processor.key', 'hmackey' distribution = 'zip' // this is important since we use the reindex module in ML setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser' diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java index 5b648a4cf8d86..9a29560581385 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java @@ -79,7 +79,6 @@ public static WritePrimaryResult randomFrom(Translog.Operation.Type.values())); switch (type) { case INDEX: - operations.add(new Translog.Index("_doc", id, seqNo, primaryTerm, 0, VersionType.INTERNAL, SOURCE, null, -1)); + operations.add(new Translog.Index("_doc", id, seqNo, primaryTerm, 0, SOURCE, null, -1)); break; case DELETE: operations.add( - new Translog.Delete("_doc", id, new Term("_id", Uid.encodeId(id)), seqNo, primaryTerm, 0, VersionType.INTERNAL)); + new Translog.Delete("_doc", id, new Term("_id", Uid.encodeId(id)), seqNo, primaryTerm, 0)); break; case NO_OP: operations.add(new Translog.NoOp(seqNo, primaryTerm, "test")); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java index 54a56c4c3913d..d7eeb86a35fb9 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; @@ -40,15 +39,13 @@ public void testDoNotFillGaps() throws Exception { final String id = Long.toString(i); SourceToParse sourceToParse = SourceToParse.source(indexShard.shardId().getIndexName(), "_doc", id, new BytesArray("{}"), XContentType.JSON); - indexShard.applyIndexOperationOnReplica(++seqNo, - 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + indexShard.applyIndexOperationOnReplica(++seqNo, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); } long seqNoBeforeGap = seqNo; seqNo += 8; SourceToParse sourceToParse = SourceToParse.source(indexShard.shardId().getIndexName(), "_doc", "9", new BytesArray("{}"), XContentType.JSON); - indexShard.applyIndexOperationOnReplica(seqNo, - 1, VersionType.EXTERNAL, 
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + indexShard.applyIndexOperationOnReplica(seqNo, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); // promote the replica to primary: final ShardRouting replicaRouting = indexShard.routingEntry(); diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 5db149bc6774e..ca926fa0d54cc 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -6,6 +6,8 @@ import java.nio.file.Paths import java.nio.file.StandardCopyOption apply plugin: 'elasticsearch.esplugin' +apply plugin: 'nebula.maven-base-publish' +apply plugin: 'nebula.maven-scm' archivesBaseName = 'x-pack-core' diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java index 1c2808c70ffcf..fd0fde76e6883 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java @@ -5,14 +5,8 @@ */ package org.elasticsearch.xpack.core.ml.job.config; -import org.apache.lucene.analysis.Analyzer; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -22,15 +16,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisRegistry; -import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.CustomAnalyzer; -import org.elasticsearch.index.analysis.CustomAnalyzerProvider; -import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.rest.action.admin.indices.RestAnalyzeAction; import org.elasticsearch.xpack.core.ml.MlParserType; @@ -42,12 +27,11 @@ import java.util.Map; import java.util.Objects; - /** * Configuration for the categorization analyzer. * * The syntax is a subset of what can be supplied to the {@linkplain RestAnalyzeAction _analyze endpoint}. - * To summarise, the first option is to specify the name of an out-of-the-box analyzer: + * To summarize, the first option is to specify the name of an out-of-the-box analyzer: * * "categorization_analyzer" : "standard" * @@ -66,11 +50,6 @@ * { "type" : "pattern_replace", "pattern": "^[0-9].*" } * ] * - * - * Unfortunately there is no easy to to reuse a subset of the _analyze action implementation, so much - * of the code in this file is copied from {@link TransportAnalyzeAction}. Unfortunately the logic required here is - * not quite identical to that of {@link TransportAnalyzeAction}, and the required code is hard to partially reuse. 
- * TODO: consider refactoring ES core to allow more reuse. */ public class CategorizationAnalyzerConfig implements ToXContentFragment, Writeable { @@ -350,175 +329,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * Convert the config to an {@link Analyzer}. This may be a global analyzer or a newly created custom analyzer. - * In the case of a global analyzer the caller must NOT close it when they have finished with it. In the case of - * a newly created custom analyzer the caller is responsible for closing it. - * @return The first tuple member is the {@link Analyzer}; the second indicates whether the caller is responsible - * for closing it. - */ - public Tuple toAnalyzer(AnalysisRegistry analysisRegistry, Environment environment) throws IOException { - if (analyzer != null) { - Analyzer globalAnalyzer = analysisRegistry.getAnalyzer(analyzer); - if (globalAnalyzer == null) { - throw new IllegalArgumentException("Failed to find global analyzer [" + analyzer + "]"); - } - return new Tuple<>(globalAnalyzer, Boolean.FALSE); - } else { - List charFilterFactoryList = - parseCharFilterFactories(analysisRegistry, environment); - - Tuple tokenizerFactory = parseTokenizerFactory(analysisRegistry, - environment); - - List tokenFilterFactoryList = parseTokenFilterFactories(analysisRegistry, - environment, tokenizerFactory, charFilterFactoryList); - - return new Tuple<>(new CustomAnalyzer(tokenizerFactory.v1(), tokenizerFactory.v2(), - charFilterFactoryList.toArray(new CharFilterFactory[charFilterFactoryList.size()]), - tokenFilterFactoryList.toArray(new TokenFilterFactory[tokenFilterFactoryList.size()])), Boolean.TRUE); - } - } - - - /** - * Get char filter factories for each configured char filter. Each configuration - * element can be the name of an out-of-the-box char filter, or a custom definition. 
- */ - private List parseCharFilterFactories(AnalysisRegistry analysisRegistry, - Environment environment) throws IOException { - final List charFilterFactoryList = new ArrayList<>(); - for (NameOrDefinition charFilter : charFilters) { - final CharFilterFactory charFilterFactory; - if (charFilter.name != null) { - AnalysisModule.AnalysisProvider charFilterFactoryFactory = - analysisRegistry.getCharFilterProvider(charFilter.name); - if (charFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global char filter under [" + charFilter.name + "]"); - } - charFilterFactory = charFilterFactoryFactory.get(environment, charFilter.name); - } else { - String charFilterTypeName = charFilter.definition.get("type"); - if (charFilterTypeName == null) { - throw new IllegalArgumentException("Missing [type] setting for char filter: " + charFilter.definition); - } - AnalysisModule.AnalysisProvider charFilterFactoryFactory = - analysisRegistry.getCharFilterProvider(charFilterTypeName); - if (charFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global char filter under [" + charFilterTypeName + "]"); - } - Settings settings = augmentSettings(charFilter.definition); - // Need to set anonymous "name" of char_filter - charFilterFactory = charFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, - "_anonymous_charfilter", settings); - } - if (charFilterFactory == null) { - throw new IllegalArgumentException("Failed to find char filter [" + charFilter + "]"); - } - charFilterFactoryList.add(charFilterFactory); - } - return charFilterFactoryList; - } - - /** - * Get the tokenizer factory for the configured tokenizer. The configuration - * can be the name of an out-of-the-box tokenizer, or a custom definition. - */ - private Tuple parseTokenizerFactory(AnalysisRegistry analysisRegistry, - Environment environment) throws IOException { - final String name; - final TokenizerFactory tokenizerFactory; - if (tokenizer.name != null) { - name = tokenizer.name; - AnalysisModule.AnalysisProvider tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(name); - if (tokenizerFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global tokenizer under [" + name + "]"); - } - tokenizerFactory = tokenizerFactoryFactory.get(environment, name); - } else { - String tokenizerTypeName = tokenizer.definition.get("type"); - if (tokenizerTypeName == null) { - throw new IllegalArgumentException("Missing [type] setting for tokenizer: " + tokenizer.definition); - } - AnalysisModule.AnalysisProvider tokenizerFactoryFactory = - analysisRegistry.getTokenizerProvider(tokenizerTypeName); - if (tokenizerFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global tokenizer under [" + tokenizerTypeName + "]"); - } - Settings settings = augmentSettings(tokenizer.definition); - // Need to set anonymous "name" of tokenizer - name = "_anonymous_tokenizer"; - tokenizerFactory = tokenizerFactoryFactory.get(buildDummyIndexSettings(settings), environment, name, settings); - } - return new Tuple<>(name, tokenizerFactory); - } - - /** - * Get token filter factories for each configured token filter. Each configuration - * element can be the name of an out-of-the-box token filter, or a custom definition. 
- */ - private List parseTokenFilterFactories(AnalysisRegistry analysisRegistry, Environment environment, - Tuple tokenizerFactory, - List charFilterFactoryList) throws IOException { - final List tokenFilterFactoryList = new ArrayList<>(); - for (NameOrDefinition tokenFilter : tokenFilters) { - TokenFilterFactory tokenFilterFactory; - if (tokenFilter.name != null) { - AnalysisModule.AnalysisProvider tokenFilterFactoryFactory; - tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name); - if (tokenFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global token filter under [" + tokenFilter.name + "]"); - } - tokenFilterFactory = tokenFilterFactoryFactory.get(environment, tokenFilter.name); - } else { - String filterTypeName = tokenFilter.definition.get("type"); - if (filterTypeName == null) { - throw new IllegalArgumentException("Missing [type] setting for token filter: " + tokenFilter.definition); - } - AnalysisModule.AnalysisProvider tokenFilterFactoryFactory = - analysisRegistry.getTokenFilterProvider(filterTypeName); - if (tokenFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global token filter under [" + filterTypeName + "]"); - } - Settings settings = augmentSettings(tokenFilter.definition); - // Need to set anonymous "name" of token_filter - tokenFilterFactory = tokenFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, - "_anonymous_tokenfilter", settings); - tokenFilterFactory = CustomAnalyzerProvider.checkAndApplySynonymFilter(tokenFilterFactory, tokenizerFactory.v1(), - tokenizerFactory.v2(), tokenFilterFactoryList, charFilterFactoryList, environment); - } - if (tokenFilterFactory == null) { - throw new IllegalArgumentException("Failed to find or create token filter [" + tokenFilter + "]"); - } - tokenFilterFactoryList.add(tokenFilterFactory); - } - return tokenFilterFactoryList; - } - - /** - * The Elasticsearch analysis functionality is designed to work with indices. For - * categorization we have to pretend we've got some index settings. - */ - private IndexSettings buildDummyIndexSettings(Settings settings) { - IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build(); - return new IndexSettings(metaData, Settings.EMPTY); - } - - /** - * The behaviour of Elasticsearch analyzers can vary between versions. - * For categorization we'll always use the latest version of the text analysis. - * The other settings are just to stop classes that expect to be associated with - * an index from complaining. - */ - private Settings augmentSettings(Settings settings) { - return Settings.builder().put(settings) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) - .build(); - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -609,17 +419,5 @@ public CategorizationAnalyzerConfig build() { } return new CategorizationAnalyzerConfig(analyzer, charFilters, tokenizer, tokenFilters); } - - /** - * Verify that the builder will build a valid config. This is not done as part of the basic build - * because it verifies that the names of analyzers/tokenizers/filters referenced by the config are - * known, and the validity of these names could change over time. 
- */ - public void verify(AnalysisRegistry analysisRegistry, Environment environment) throws IOException { - Tuple tuple = build().toAnalyzer(analysisRegistry, environment); - if (tuple.v2()) { - tuple.v1().close(); - } - } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index c8290521f98e4..560bac895fa69 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -21,8 +21,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.xpack.core.ml.MlParserType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; @@ -809,8 +807,8 @@ public Builder setAnalysisConfig(AnalysisConfig.Builder configBuilder) { return this; } - public AnalysisLimits getAnalysisLimits() { - return analysisLimits; + public AnalysisConfig getAnalysisConfig() { + return analysisConfig; } public Builder setAnalysisLimits(AnalysisLimits analysisLimits) { @@ -1135,18 +1133,6 @@ public void validateAnalysisLimitsAndSetDefaults(@Nullable ByteSizeValue maxMode AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB); } - /** - * Validate the char filter/tokenizer/token filter names used in the categorization analyzer config (if any). - * The overall structure can be validated at parse time, but the exact names need to be checked separately, - * as plugins that provide the functionality can be installed/uninstalled. - */ - public void validateCategorizationAnalyzer(AnalysisRegistry analysisRegistry, Environment environment) throws IOException { - CategorizationAnalyzerConfig categorizationAnalyzerConfig = analysisConfig.getCategorizationAnalyzerConfig(); - if (categorizationAnalyzerConfig != null) { - new CategorizationAnalyzerConfig.Builder(categorizationAnalyzerConfig).verify(analysisRegistry, environment); - } - } - private void validateGroups() { for (String group : this.groups) { if (MlStrings.isValidId(group) == false) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java index 8f66af14dfcad..1617a92b550ac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java @@ -158,7 +158,7 @@ public static X509ExtendedKeyManager keyManager(Certificate[] certificateChain, private static KeyStore getKeyStore(Certificate[] certificateChain, PrivateKey privateKey, char[] password) throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException { - KeyStore keyStore = KeyStore.getInstance("jks"); + KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); keyStore.load(null, null); // password must be non-null for keystore... 
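// Using the JVM default keystore type here (rather than hard-coding "jks") matters on FIPS-enabled JVMs, where a JKS implementation may not be available.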
keyStore.setKeyEntry("key", privateKey, password, certificateChain); @@ -242,7 +242,7 @@ public static X509ExtendedTrustManager trustManager(Certificate[] certificates) static KeyStore trustStore(Certificate[] certificates) throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException { assert certificates != null : "Cannot create trust store with null certificates"; - KeyStore store = KeyStore.getInstance("jks"); + KeyStore store = KeyStore.getInstance(KeyStore.getDefaultType()); store.load(null, null); int counter = 0; for (Certificate certificate : certificates) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java index 10c4f0fffc37c..063f1f655a46e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java @@ -8,6 +8,8 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchAction; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRequestBuilder; @@ -29,9 +31,7 @@ import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequestBuilder; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchAction; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequestBuilder; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.service.WatcherServiceAction; import org.elasticsearch.xpack.core.watcher.transport.actions.service.WatcherServiceRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.service.WatcherServiceRequestBuilder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java index 56cedc457bda7..509116b018e2b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.put; import org.elasticsearch.action.Action; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; /** * This action puts an watch into the watch index and adds it to the scheduler diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequestBuilder.java index 050ac38dbb00c..840ff560fba11 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequestBuilder.java @@ -9,6 +9,8 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder; public class PutWatchRequestBuilder extends ActionRequestBuilder { @@ -43,7 +45,7 @@ public PutWatchRequestBuilder setSource(BytesReference source, XContentType xCon * @param source the source of the watch to be created */ public PutWatchRequestBuilder setSource(WatchSourceBuilder source) { - request.setSource(source); + request.setSource(source.buildAsBytes(XContentType.JSON), XContentType.JSON); return this; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchResponse.java deleted file mode 100644 index b84901ecac163..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchResponse.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.watcher.transport.actions.put; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; - -import java.io.IOException; - -/** - * The Response for a put watch action - */ -public class PutWatchResponse extends ActionResponse { - - private String id; - private long version; - private boolean created; - - public PutWatchResponse() { - } - - public PutWatchResponse(String id, long version, boolean created) { - this.id = id; - this.version = version; - this.created = created; - } - - public String getId() { - return id; - } - - public long getVersion() { - return version; - } - - public boolean isCreated() { - return created; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - out.writeVLong(version); - out.writeBoolean(created); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - id = in.readString(); - version = in.readVLong(); - created = in.readBoolean(); - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java index a58491dea3bc4..00d1c47cdedaa 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java @@ -153,6 +153,7 @@ public void testClusterRestartWhileExpired() throws Exception { } public void testClusterRestartWithOldSignature() throws Exception { + assumeFalse("Can't run in a FIPS JVM. 
We can't generate old licenses since PBEWithSHA1AndDESede is not available", inFipsJvm()); wipeAllLicenses(); internalCluster().startNode(); ensureGreen(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/SelfGeneratedLicenseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/SelfGeneratedLicenseTests.java index 4e061623ccd94..0b61604ed7cc9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/SelfGeneratedLicenseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/SelfGeneratedLicenseTests.java @@ -76,6 +76,7 @@ public void testFrom1x() throws Exception { } public void testTrialLicenseVerifyWithOlderVersion() throws Exception { + assumeFalse("Can't run in a FIPS JVM. We can't generate old licenses since PBEWithSHA1AndDESede is not available", inFipsJvm()); long issueDate = System.currentTimeMillis(); License.Builder specBuilder = License.builder() .issuedTo("customer") diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java index cf6ab53b9f57b..20259144b420b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; import java.math.BigInteger; import java.net.InetAddress; @@ -32,6 +33,11 @@ */ public class CertGenUtilsTests extends ESTestCase { + @BeforeClass + public static void muteInFips(){ + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); + } + public void testSerialNotRepeated() { int iterations = scaledRandomIntBetween(10, 100); List list = new ArrayList<>(iterations); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java index 880cb03a64fc5..b82275a883311 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java @@ -53,6 +53,7 @@ public void testReadPKCS8EcKey() throws Exception { } public void testReadEncryptedPKCS8Key() throws Exception { + assumeFalse("Can't run in a FIPS JVM, PBE KeySpec is not available", inFipsJvm()); Key key = getKeyFromKeystore("RSA"); assertThat(key, notNullValue()); assertThat(key, instanceOf(PrivateKey.class)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java index 72cd13471df1f..d8e0b693f7008 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java @@ -42,7 +42,10 @@ import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.security.UnrecoverableKeyException; +import java.security.cert.Certificate; import java.security.cert.CertificateException; +import java.util.Collections; +import java.util.List; import java.util.concurrent.CountDownLatch; import 
java.util.function.Consumer; @@ -76,6 +79,7 @@ public void cleanup() throws Exception { * Tests reloading a keystore that is used in the KeyManager of SSLContext */ public void testReloadingKeyStore() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); final Path tempDir = createTempDir(); final Path keystorePath = tempDir.resolve("testnode.jks"); final Path updatedKeystorePath = tempDir.resolve("testnode_updated.jks"); @@ -133,12 +137,10 @@ public void testPEMKeyConfigReloading() throws Exception { Path updatedKeyPath = tempDir.resolve("testnode_updated.pem"); Path certPath = tempDir.resolve("testnode.crt"); Path updatedCertPath = tempDir.resolve("testnode_updated.crt"); - final Path clientTruststorePath = tempDir.resolve("testnode.jks"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"), keyPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), updatedKeyPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), updatedCertPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"), certPath); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"), clientTruststorePath); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); final Settings settings = Settings.builder() @@ -150,7 +152,7 @@ public void testPEMKeyConfigReloading() throws Exception { final Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); // Load HTTPClient once. Client uses a keystore containing testnode key/cert as a truststore - try (CloseableHttpClient client = getSSLClient(clientTruststorePath, "testnode")) { + try (CloseableHttpClient client = getSSLClient(Collections.singletonList(certPath))) { final Consumer keyMaterialPreChecks = (context) -> { try (MockWebServer server = new MockWebServer(context, false)) { server.enqueue(new MockResponse().setResponseCode(200).setBody("body")); @@ -190,6 +192,7 @@ public void testPEMKeyConfigReloading() throws Exception { * reloadable SSLContext used in the HTTPClient) and as a KeyStore for the MockWebServer */ public void testReloadingTrustStore() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path tempDir = createTempDir(); Path trustStorePath = tempDir.resolve("testnode.jks"); Path updatedTruststorePath = tempDir.resolve("testnode_updated.jks"); @@ -240,19 +243,21 @@ */ public void testReloadingPEMTrustConfig() throws Exception { Path tempDir = createTempDir(); - Path clientCertPath = tempDir.resolve("testnode.crt"); - Path keyStorePath = tempDir.resolve("testnode.jks"); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"), keyStorePath); + Path serverCertPath = tempDir.resolve("testnode.crt"); + Path serverKeyPath = tempDir.resolve("testnode.pem"); + Path updatedCert = tempDir.resolve("updated.crt");
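These PEM-based tests build their trust material in memory from loose certificates instead of shipping a JKS truststore, the same pattern behind the trustStore() change at the top of this diff that swaps hard-coded "jks" for KeyStore.getDefaultType(), so everything also runs on FIPS JVMs where JKS keystores can't be used. A minimal standalone sketch of that pattern (illustrative helper, names made up; only standard JSSE APIs):

import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyStore;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.util.List;

final class PemTrust {
    /** Builds an SSLContext that trusts exactly the given PEM-encoded certificates. */
    static SSLContext contextTrusting(List<Path> pemCerts) throws Exception {
        // Default store type rather than hard-coded "jks", so this also works on FIPS JVMs
        KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
        trustStore.load(null, null); // initialise an empty, in-memory store
        CertificateFactory factory = CertificateFactory.getInstance("X.509");
        int counter = 0;
        for (Path pem : pemCerts) {
            try (InputStream in = Files.newInputStream(pem)) {
                for (Certificate cert : factory.generateCertificates(in)) {
                    trustStore.setCertificateEntry("cert" + counter++, cert);
                }
            }
        }
        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init(trustStore);
        SSLContext ctx = SSLContext.getInstance("TLSv1.2");
        ctx.init(null, tmf.getTrustManagers(), null);
        return ctx;
    }
}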
//Our keystore contains two Certificates it can present. One built from the RSA keypair and one built from the EC keypair. EC is used since the keyManager presents the first one in alias alphabetical order (and testnode_ec comes before testnode_rsa) - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt"), clientCertPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"), serverCertPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"), serverKeyPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), updatedCert); Settings settings = Settings.builder() - .putList("xpack.ssl.certificate_authorities", clientCertPath.toString()) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.certificate_authorities", serverCertPath) + .put("path.home", createTempDir()) + .build(); Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings); // Create the MockWebServer once for both pre and post checks - try (MockWebServer server = getSslServer(keyStorePath, "testnode")) { + try (MockWebServer server = getSslServer(serverKeyPath, serverCertPath, "testnode")) { final Consumer trustMaterialPreChecks = (context) -> { try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { privilegedConnect(() -> client.execute(new HttpGet("https://localhost:" + server.getPort())).close()); @@ -263,10 +268,7 @@ public void testReloadingPEMTrustConfig() throws Exception { final Runnable modifier = () -> { try { - Path updatedCert = tempDir.resolve("updated.crt"); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), - updatedCert, StandardCopyOption.REPLACE_EXISTING); - atomicMoveIfPossible(updatedCert, clientCertPath); + atomicMoveIfPossible(updatedCert, serverCertPath); } catch (Exception e) { throw new RuntimeException("failed to modify file", e); } @@ -277,7 +279,7 @@ public void testReloadingPEMTrustConfig() throws Exception { try (CloseableHttpClient client = HttpClients.custom().setSSLContext(updatedContext).build()) { SSLHandshakeException sslException = expectThrows(SSLHandshakeException.class, () -> privilegedConnect(() -> client.execute(new HttpGet("https://localhost:" + server.getPort())).close())); - assertThat(sslException.getCause().getMessage(), containsString("PKIX path building failed")); + assertThat(sslException.getCause().getMessage(), containsString("PKIX path validation failed")); } catch (Exception e) { throw new RuntimeException("Error closing CloseableHttpClient", e); } @@ -291,16 +293,17 @@ * that is being monitored */ public void testReloadingKeyStoreException() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path tempDir = createTempDir(); Path keystorePath = tempDir.resolve("testnode.jks"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"), keystorePath); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", keystorePath) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.keystore.path", keystorePath) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()) + .build(); Environment env = randomBoolean() ?
null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); @@ -336,12 +339,12 @@ public void testReloadingPEMKeyConfigException() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.key", keyPath) - .put("xpack.ssl.certificate", certPath) - .putList("xpack.ssl.certificate_authorities", certPath.toString(), clientCertPath.toString()) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .putList("xpack.ssl.certificate_authorities", certPath.toString(), clientCertPath.toString()) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); @@ -373,10 +376,10 @@ public void testTrustStoreReloadException() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.truststore.path", trustStorePath) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.truststore.path", trustStorePath) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); @@ -482,6 +485,20 @@ private static MockWebServer getSslServer(Path keyStorePath, String keyStorePass return server; } + private static MockWebServer getSslServer(Path keyPath, Path certPath, String password) throws KeyStoreException, CertificateException, + NoSuchAlgorithmException, IOException, KeyManagementException, UnrecoverableKeyException { + KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); + keyStore.load(null, password.toCharArray()); + keyStore.setKeyEntry("testnode_ec", PemUtils.readPrivateKey(keyPath, password::toCharArray), password.toCharArray(), + CertParsingUtils.readCertificates(Collections.singletonList(certPath))); + final SSLContext sslContext = new SSLContextBuilder().loadKeyMaterial(keyStore, password.toCharArray()) + .build(); + MockWebServer server = new MockWebServer(sslContext, false); + server.enqueue(new MockResponse().setResponseCode(200).setBody("body")); + server.start(); + return server; + } + private static CloseableHttpClient getSSLClient(Path trustStorePath, String trustStorePass) throws KeyStoreException, NoSuchAlgorithmException, KeyManagementException, IOException, CertificateException { @@ -493,6 +510,23 @@ private static CloseableHttpClient getSSLClient(Path trustStorePath, String trus return HttpClients.custom().setSSLContext(sslContext).build(); } + /** + * Creates a {@link CloseableHttpClient} that only trusts the given certificate(s) + * + * @param trustedCertificatePaths The certificates this client trusts + **/ + private static CloseableHttpClient getSSLClient(List trustedCertificatePaths) throws 
KeyStoreException, + NoSuchAlgorithmException, + KeyManagementException, IOException, CertificateException { + KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); + trustStore.load(null, null); + for (Certificate cert : CertParsingUtils.readCertificates(trustedCertificatePaths)) { + trustStore.setCertificateEntry(cert.toString(), cert); + } + final SSLContext sslContext = new SSLContextBuilder().loadTrustMaterial(trustStore, null).build(); + return HttpClients.custom().setSSLContext(sslContext).build(); + } + private static void privilegedConnect(CheckedRunnable runnable) throws Exception { try { AccessController.doPrivileged((PrivilegedExceptionAction) () -> { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index df764bb3f4772..048ad2e8e3692 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -72,6 +72,8 @@ public class SSLServiceTests extends ESTestCase { private Path testnodeStore; private String testnodeStoreType; private Path testclientStore; + private Path testnodeCert; + private Path testnodeKey; private Environment env; @Before @@ -80,17 +82,20 @@ public void setup() throws Exception { if (randomBoolean()) { testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); // The default is to use JKS. Randomly test with explicit and with the default value. - testnodeStoreType = randomBoolean() ? "jks" : null; + testnodeStoreType = "jks"; } else { testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12"); testnodeStoreType = randomBoolean() ? 
"PKCS12" : null; } logger.info("Using [{}] key/truststore [{}]", testnodeStoreType, testnodeStore); + testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); testclientStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"); env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); } public void testThatCustomTruststoreCanBeSpecified() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path testClientStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); @@ -110,7 +115,7 @@ public void testThatCustomTruststoreCanBeSpecified() throws Exception { .setSecureSettings(secureCustomSettings) .build(); - SSLConfiguration configuration = new SSLConfiguration(customTruststoreSettings, globalConfiguration(sslService)); + SSLConfiguration configuration = new SSLConfiguration(customTruststoreSettings, globalConfiguration(sslService)); SSLEngine sslEngineWithTruststore = sslService.createSSLEngine(configuration, null, -1); assertThat(sslEngineWithTruststore, is(not(nullValue()))); @@ -126,10 +131,10 @@ public void testThatCustomTruststoreCanBeSpecified() throws Exception { public void testThatSslContextCachingWorks() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) .setSecureSettings(secureSettings) .build(); SSLService sslService = new SSLService(settings, env); @@ -145,6 +150,7 @@ public void testThatSslContextCachingWorks() throws Exception { } public void testThatKeyStoreAndKeyCanHaveDifferentPasswords() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path differentPasswordsStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-different-passwords.jks"); MockSecureSettings secureSettings = new MockSecureSettings(); @@ -160,6 +166,7 @@ public void testThatKeyStoreAndKeyCanHaveDifferentPasswords() throws Exception { } public void testIncorrectKeyPasswordThrowsException() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path differentPasswordsStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-different-passwords.jks"); try { @@ -180,12 +187,12 @@ public void testIncorrectKeyPasswordThrowsException() throws Exception { public void testThatSSLv3IsNotEnabled() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + 
.put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -214,6 +221,7 @@ public void testThatCreateSSLEngineWithOnlyTruststoreWorks() throws Exception { public void testCreateWithKeystoreIsValidForServer() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); Settings settings = Settings.builder() @@ -227,6 +235,7 @@ public void testCreateWithKeystoreIsValidForServer() throws Exception { } public void testValidForServerWithFallback() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); Settings settings = Settings.builder() @@ -251,6 +260,7 @@ public void testValidForServerWithFallback() throws Exception { } public void testGetVerificationMode() throws Exception { + assumeFalse("Can't run in a FIPS JVM, TrustAllConfig is not a SunJSSE TrustManagers", inFipsJvm()); SSLService sslService = new SSLService(Settings.EMPTY, env); assertThat(globalConfiguration(sslService).verificationMode(), is(XPackSettings.VERIFICATION_MODE_DEFAULT)); @@ -273,7 +283,7 @@ public void testIsSSLClientAuthEnabled() throws Exception { Settings settings = Settings.builder() .put("xpack.ssl.client_authentication", "none") .put("xpack.security.transport.ssl.client_authentication", "optional") - .put("transport.profiles.foo.port", "9400-9410") + .put("transport.profiles.foo.port", "9400-9410") .build(); sslService = new SSLService(settings, env); assertFalse(sslService.isSSLClientAuthEnabled(globalConfiguration(sslService))); @@ -325,13 +335,12 @@ public void testCiphersAndInvalidCiphersWork() throws Exception { ciphers.add("foo"); ciphers.add("bar"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .putList("xpack.ssl.ciphers", ciphers.toArray(new String[ciphers.size()])) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -342,14 +351,14 @@ public void testCiphersAndInvalidCiphersWork() throws Exception { public void testInvalidCiphersOnlyThrowsException() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); - + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .putList("xpack.ssl.cipher_suites", new 
String[] { "foo", "bar" }) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .putList("xpack.ssl.cipher_suites", new String[]{"foo", "bar"}) + .setSecureSettings(secureSettings) + .build(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SSLService(settings, env)); assertThat(e.getMessage(), is("none of the ciphers [foo, bar] are supported by this JVM")); @@ -357,12 +366,12 @@ public void testInvalidCiphersOnlyThrowsException() throws Exception { public void testThatSSLEngineHasCipherSuitesOrderSet() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -372,12 +381,12 @@ public void testThatSSLEngineHasCipherSuitesOrderSet() throws Exception { public void testThatSSLSocketFactoryHasProperCiphersAndProtocols() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration config = globalConfiguration(sslService); final SSLSocketFactory factory = sslService.sslSocketFactory(config); @@ -397,12 +406,12 @@ public void testThatSSLSocketFactoryHasProperCiphersAndProtocols() throws Except public void testThatSSLEngineHasProperCiphersAndProtocols() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -462,8 +471,8 @@ public void testEmptyTrustManager() throws Exception { assertThat(trustManager.getAcceptedIssuers(), emptyArray()); } - public void testGetConfigurationByContextName() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); final SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); sslContext.init(null, null, null); final 
String[] cipherSuites = sslContext.getSupportedSSLParameters().getCipherSuites(); @@ -525,7 +534,8 @@ public void testGetConfigurationByContextName() throws Exception { assertThat(mon3Config, sameInstance(global)); } - public void testReadCertificateInformation () throws Exception { + public void testReadCertificateInformation() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); final Path jksPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); final Path p12Path = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12"); final Path pemPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/active-directory-ca.crt"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java index 6dd9bb2b46eb0..511fd77811365 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java @@ -22,10 +22,12 @@ public class StoreKeyConfigTests extends ESTestCase { public void testCreateKeyManagerUsingJKS() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); tryReadPrivateKeyFromKeyStore("jks", ".jks"); } public void testCreateKeyManagerUsingPKCS12() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); tryReadPrivateKeyFromKeyStore("PKCS12", ".p12"); } diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt new file mode 100644 index 0000000000000..59ecbd22e8b23 --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDoDCCAoigAwIBAgIUMVGoHuyNTjTFaoRmqFELz75jzDEwDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTgwMjE1MTc0OTExWhcNMjEwMjE0MTc0OTExWjARMQ8wDQYD +VQQDEwZzYW1iYTQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtGBwa +n+7JN2vweSUsYh4zPmh8RPIE+nEVjK1lx/rADUBY7UVjfTYC+MVKKiezZe7gYCNT +7JNKazPpgVI9e3ZFKw/UxomLqRuuvn5bTh+1tMs3afY5+GGzi7oPmEbBO3ceg0Hi +rNSTDa1rfroZnRYK8uIeSZacQnAW90plITI7rBBt9jq+W9albFbDybfDgNv+yS/C +rzIsofm4rbFC3SMRYfrT6HvwDhjOmmYKZci5x7tsn0T+3tSiR44Bw5/DgiN5kX3m +/kl9qg1eoYWbCUy1dKmQlb4Nb4uNcxrIugLB3zjBkfhMZ0OHoveKh/lJASTWik9k +xQ9rEYbpsRbuXpsHAgMBAAGjgcwwgckwHQYDVR0OBBYEFJOLa7UXKtLPibgKeFh7 +Kq1+rS0/MG8GA1UdIwRoMGaAFGaNmN5mi9jaMW25MEWYgt+5OkDBoTikNjA0MTIw +MAYDVQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBD +QYIUdwsnIxjgSneHNVKT6JNCCsrQ3T0wLAYDVR0RBCUwI4IJbG9jYWxob3N0hwR/ +AAABhxAAAAAAAAAAAAAAAAAAAAABMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQAD +ggEBAEHqT1WHkcF8DuOgyIBx7wKcUVQ5H1qYYlJ1xgMGrKFFZLUzouLcON7oadEu +HLIJ4Z3AKD3bqWpcls5XJ9MTECGR48tou67x9cXqTV7jR3Rh0H/VGwzwhR85vbpu +o8ielOPL8XAQOfnAFESJii5sfCU4ZwLg+3evmGZdKfhU6rqQtLimgG/Gm96vOJne +y0a/TZTWrfAarithkOHHXSSAhEI5SdW5SlZAytF4AmYqFvafwxe1+NyFwfCRy0Xl +H40WgVsq+z84psU+WyORb3THX5rgB4au9nuMXOqFKAtrJSI/uApncYraaqU28rqB +gYd8XrtjhKOLw+6viqAKu8l7/cs= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem 
b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem new file mode 100644 index 0000000000000..63dca9651ed7e --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-192-CBC,D097C279AD03E97C34B81B834539C0BB + +rQkO27X8phcP9ydaFArZy4SFDcoCGFsr2TcR0M6Vn8g0ZT9MIrnj2xHCmeMExlKT +MViBQzmupBs0IW7y/ovpEUBgRd4TnSigKNF2XZ3+C6F4rkziTheWOlaUq7PIqOnW +dTmf/WZDLlgms6hzrc7B447lO2FcNUDB6uXkxS1dAhh9T6DFcq9KuW7KJOWt9Dre +75z6rh5nHboUw2i01VqJK+86aL5V3vNK3bjYcbIHKHrgM/+V+hFUNJ6aEepVf+G0 +jzicgEBJwtzOg4MTSqR2z3niNbz9LLBJkH+uR+tN941+JPFmEwsLtkq3OBH7nraG +Srqz/5YcdA45jdZNme+KqorT8mNX/Hx2BsmQYnw+A0ZAqnSWeMcNZgFaAn8OcVxy +d5VS0KFlQD7DPnZi89cyTRScKhcj451NhwOPWIE7w1UpNPVGRj5FBMAtDVaE2Wkc +wuQ0nSwsC3EOvVDMe/fmv2VcoWceh1V9esA2H0n9NWQApGSqz17PPebwQadPX3Y0 +atrbbXT7fYTD3Ij38bmYZiDOluHiXxFchWxVUReq6nHJD3yo8ch2CKpx9XzjElLv +6leUZhlIlq026QxGNVf+OQLTlLXjF8jGbRC31Y4yiHj1I12P0+dic0W8YvUkC5Pp +w7NHMtYG6VHLGkPbCQxzTMS+JU24j/wMIokZWlmaRNDf2DZZAS0asQ/EOG/I1afc +SXyHDQUhi0/8N7CJWE/i1xLrazFKiIkxucxY+WDqGrk5sZnP+cH0cM7Zja253NTy +2B8LZJX360peCD15grkMt1oRtdMvZxF1rS/3BDPuANC59yg5i4eC+P39wkeMf8hu +o8I2Hp5021mT9AWE1Dxf8gSZIJZxIeEgioRvoY1LOEfghe/OXQHR0SXJq9k5GNiA +z4Tz3cfCT3WrHbReY585o2qtbpWu2OeSdxrv8p9kYi8GkevbR+nLq8NaH5WPRD4E +b9RLUz1NfiK8DOW7tk8+gwsNun3hmil7xubw1uhc6a0OGKb9ijMS0rslYW9zeowu +dkROuVwhUhXHOx0ZGWUGauJifERzICWR8Jx48/8DWD4xW3DkIRt5gh3CvzHcdSbv +4VEFSyndWeyNk2Yc6+sX0H35Rngc7gjedCAn8hUBnUq9srfhGKaZ6bahlUt0xsmK +Y3cxcd1or/2S2hONcN4NE2MfB/RRUVeHxdp34RPeW6L8qH/YZFxqt8dUm19QXr0c +CdgSEmVyKOpCPebGJwPwdJEmbxPS/98AjiqOpt83JLUhMeUGhjawXvLzl0YEBJV9 ++6waTV4Xl94aJszyvDeW/+cKIgeso9SeQSN6fLsXgdAVABCZ5yJ+liw6I84G0f2n +D6e51P0JQAL8v28bBACdoB0Qxr9UTV/X8smGTwWobv/KW1BPdvWETsc7TrtWLZ6F +qiZj7mI0h1oeyrC1h1+1oVuUTpy7JICCBloL4ir56lcSWmNZm1GRfar8UhXA7aNw +klAkS6rYHH4gDxhvwd1k/pN1HlCtbmwLyYC/f11+89RnPr0FFW48qMqmwBls63dp +4aAqneUiEow/evE4fBTLhFrgkvfZnjhd41IpzXfMWB5x9SHjrrS4/rjsHXcHUrAh +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.cert b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt similarity index 100% rename from x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.cert rename to x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem new file mode 100644 index 0000000000000..b0f7a585d7f9b --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-192-CBC,2F36F79E75ACA7803AF1BC1B70C2360C + +d4/f7dnpPW9DfhGXuGDx7r56BjQ64iNcsQdrC1CTZB363iAvBBeHaoJFaWpQOUmT +WCBdM6YmRuMi6z4sMtm8Vre3i+Mu2Jti0LTDXqH8Y6FMpptxAYoFb9hkM0OvUGDJ +ASLGTa1XKyCzZWOe2IGLtWIS8zkGv/fSXHqDJV8ddq8uLHGZXBxAye5oJUD0654T 
+DasIllbiTlBEYlEJ10OyIAspS47n4LWVE5ggyGdiyIgh5s3lAMPO3+yenDU3/dzj +YF2zHKnfe0Nx2lUQVJRYknQvFCx0WTGH9gNl40FovoOMHyRjEuOHD9PFNvL/TCh6 +9h0jJPWu/SsIiYaIqR0pDgqWdHXHA5Ea3s2+w0YHbv7DqkGXWZdceLUdZtcXCBJk +P8QL9IWa7VE50SS6wV5uXX9tV5lHzMFsNGkwtGcR1UMU5cXYqckFXgoBqDN0fyWe +V5iEknSJ4Na+MHv75rvRat0kv6upu9i5NSpYTc5jLHdWZWsYMZ/ZMiMoLBP+KAPT +DQ3eyph/84BU3DePaQF3Rsp0ZvPxqQ361Zwc4zC5CKbHur1QX8WAY5XlBMfuBpkf +CKr5wgwF+ZpS7zsfUpMPPe9Y1E8TWnhx/DtCVLEslBpr2u/rMaxPp6ev9/Wry7N+ +UFBOwodipBhlfSvLqjc511L+bXRzqXiINuW0eSKUQv0J/G0Ur894kJJ6feDYlskj +JiZjOgOyyKhB+K9AXmkfRdvWUJeweL8pgDuYSyQdQ0zoUCZALEoYK2cBWzti/wep +QPFD5oz8076aXNHKMHLsRmSFuEZb6IN0PtUNVf958EbrtABNIuoufKlKtJsEnUyK +VHIEUxExEgyECiozKnxvhr7RQ9nTQXhNdgtec6jJblYnla/+OWAfHdxtHhBjp5AX +WvLyUhmgrmLNdKd1KSzcXynBHgh0hi0HJXYx31FilwbxsdhwN1LwN/Do4T4qGkUr +InrQC3ZHRuh0yAOPrwRFEWbW5m/PzVP/xYVgFtVWP7w38ftZbaBy5xPmtswn+PH+ +cIMt1Y9PaAlhLNpW/Vfn503T9M+05vu73vbU1xgu/B1kePOqE/WO0cOZl0KdaMmT +wAQBKuI7qTACH+/8g3Uir1YSChLthH+1Gs6h686EP6ZydwXq9GYXXkNmJNJJsnmU +RDjoT0F4XBKvcQdX3EeQYs3Af2yZWFDC59c1Ews2dqMK7vy2tYITbx2yn30DBDAl +xvjp2izzmAgQJEG/RqCYsUHCCEv7wz3tpsSOkFem9IHZpR2h8Rqy88GH9qYOkgwo ++fKSmIgC4RLQXsHuh7RRuyNc2FaWDgRgSxs5V4f9xOSU/ZbUftYWnwEyCwbu3RJp +CIXQFZhzU2t5l1Eh+x40rwpEJDXBEwmOIUO3x1oOqGZPPEQ674uMal5TRjvdOVGD +h665Fpo5Xu9EQwQZHYddeRl/7yw8F6LCxBLgHlngKRHHGDUHlTscLfYRqNh+x3jT +3S8dfaGzlnwdQEx32gyLAV0/nsFnzh1AknFMT8jesIYF7PLiAi67PNyNwRCc7TFp +jpKvzkDRVP72bivTmCyP5aKR0Q2oIrAw51MMinT6R2VaoR7COjoVbqYsRLwkxu+p +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt index 75aeecd0c3b36..5500e5db4c57f 100644 --- a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt @@ -1,23 +1,34 @@ -----BEGIN CERTIFICATE----- -MIID0zCCArugAwIBAgIJAPqdcmPxQKraMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV +MIIF0zCCA7ugAwIBAgIJAJZQBp49qNzmMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp -Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTgwNDMwMTUzODM1WhcNMjIwNDI5MTUzODM1 +Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTgwNzEyMTUyMjU0WhcNNDMwMzAzMTUyMjU0 WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV -BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEA0wNzfQ9K9WIs9Le6pfiEHdCI97zGZRqOREKb+zFoDllXEEWW -Y5mfXRlTYXbxBiCIV5DDW0yaXlleq62j7/O/6prNUBiYo5sK4Wfj+1UlXLmMikkv -bShm9XzBAXHK20coLJTKOH8QOnFyuVYUvHPCLsoEhQtCHU4zoGaaPmk8w1JU/bfR -+kWmU+x0Ak4rGuRWvMMqg/bu/W/1LmESO5Jsm2CnIyB/22vB08kFn1pO0IIrPQhr -dXqPxddzxc7DuAyyMyzsrLi5msugynEwm1CTBNL9cG45ujNhWzd1rqQe1HF94mEw -RinFe2Ui+vLFpNbh8EesLsy0p18J3QkGQ/0xjQIDAQABo4G/MIG8MAkGA1UdEwQC -MAAwHQYDVR0OBBYEFLUR8gs3uCNdLIwJlkp7SwngpjfhMIGPBgNVHREEgYcwgYSC -CWxvY2FsaG9zdIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghds -b2NhbGhvc3Q0LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5s -b2NhbGRvbWFpbjaHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAEwDQYJKoZIhvcNAQEL -BQADggEBAB73kmak89jgW1q3wIUMfxisokCsk7XuYqPBpc8D06UZBaArCvW2cuLP -5PLI2bR7fSIhgZmlbawa3adOLZ9n9lOJrWUHTh43kKyiSX8EobE0T/MSGVMfTEFu -c92erLS2gSDk4wLahzm5htd1h1KwzJ5j5kdzlLGaQsnxDDjbu9BiX/otEyCl8n69 -AZKOXoYscl2NxBgr2V6C2frthJFfQ9Gxzg5q6V/P3aIUyV0xsC3Ve1wdfXqNeRfO -nrnFbKRDsxJAJ/JtO3GTtqBrFjods0sepKNxFg13r/QLJnYjYW6t7o91JZj2AFOs 
-1INZnCOAMV3vR/FOwwOT86HDgrwSy2o= +BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIICIjANBgkqhkiG9w0BAQEFAAOC +Ag8AMIICCgKCAgEAqLiqAPYBBEBvSJCiQOzV/NPgr0kLQkZGaxQ29PPoKbsKXVp+ +0Uyv9DUitPw+G04h/eOC2wiu2u5sn2rD4Ew09L41qiaViQRY6dHazAkPVq8Chet/ +GWrc+umLJUKLU7MTyC8ozmKjgkyvIuvQ6ndEP31z3JG/j9DsBAX8NWIIJSm9Jaz5 +XUS4fIXwSce141k8zb39+oStyA1qIhv6n59+oNIuuXu1XIJzjQnZCnyAO8/9i7LI +uoL93zu7xNT+ns6Tt7zhweUQEI8FeRdj/k/Of8prbaulFH9oM1g/lnGKLV7E5vh/ +foP1+SRW+MWRjAUA8MExTCtvFhRAb3x6FYzCPX3VERKn8M3m6Rewz/LQ7XG2VzdM +/dw/JSZilAWBZItkY9H1InTeIz9Sw9Z53um7tO5nzq1QQxZijh6n9vzSLoFn6qA5 +SDQl2YycjBE35i/9JBUl0KmVMOfFzpoWLiKzTJMRzNJIeRxJl3MvscbRl8fY0Kco +XQ+w84QMTo+Tn+8Ztfme4uGtHHCTRzrLSo+Hyh8BTm9jJKCaUbMnNW1p2LoxJlq5 +ah+W8QRLaFbWt4xl9TQR0kVnoOyfSGx+3SmTBODa+6Wg038eim8Tw8K+EBvxynWU +sKF1ReL0aOZLi2zqPiQTUtR2y5ue/xjsFWx+tMMGTz3Ci6UoFs9yKqQ++qkCAwEA +AaOBvzCBvDAJBgNVHRMEAjAAMB0GA1UdDgQWBBQLFB6mVsZpiHNMkxOgriN53HZv +ODCBjwYDVR0RBIGHMIGEgglsb2NhbGhvc3SCFWxvY2FsaG9zdC5sb2NhbGRvbWFp +boIKbG9jYWxob3N0NIIXbG9jYWxob3N0NC5sb2NhbGRvbWFpbjSCCmxvY2FsaG9z +dDaCF2xvY2FsaG9zdDYubG9jYWxkb21haW42hwR/AAABhxAAAAAAAAAAAAAAAAAA +AAABMA0GCSqGSIb3DQEBCwUAA4ICAQAQtteI+//qUZ7pNQ5tBuwc24UlSH4dbHFr +79pA0YN/Y7Y/PfU60YeGpPf/QzZWLnRk/8mELWy2Ok7KzhlNjr/nAksiF97LUUnl +6dP15a4ATzheYAtv/47In8ShOrPZ7YG+VAmNWRB8Rj62RuE5khcoL8BpWzCHL8Gx +Kif7lN5SOV06ExGHrBr4Y20QtZsTgkWhAP1UC5EwXxKcchCAIrb60Rx1OShzQrxz +I0RF4mfJ06Jad6qUePkPoUm1lPcFfNvAnJ+hBA210J87NSMFTSsgXT197upnCdhl +9QmKHyk12oVoMtTtf2kc1yTZQ1fnm/V4/PZ7ypyhy+jMsIQSTwEKQr7JYEQoYpdt +yTMHc9L4gPkLTwAbxUx/M1lSuQ4yD3SclBt77CxCM8I9Z76qCVakalb4VhplRbsU +sYs3dgvslFOAJKoOIPeTqm+2L6psxiV9WxA6tvEAk6P/AwjOK6Y4hclnoQDgNI2Q +U+T+ZPi5WRmibs0WxfQv4Vv4MQG/LRT3pwsKWQ76TzgtHKNHgtAKd9Sn0Nylacsh +yhDbvI0jnvwoOlUPUVWerSJLxzazG5mRLi94ZxZMb+7TUUtDBEPtkCquCAA2b4Kh +ykHbmBqhC8Pn9qc9iP0uHuuQETUl/zd7E2ZD3RIj4uYYspovihE5D1Svi5m+3NuS +sCsfHRjmMw== -----END CERTIFICATE----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks index 5a5596dae48da..0df3fdba60daf 100644 Binary files a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks and b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks differ diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.p12 b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.p12 index b2efbc498999d..592bceaaf0aa6 100644 Binary files a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.p12 and b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.p12 differ diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem index 131f25a36e161..f8251130891c9 100644 --- a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem @@ -1,27 +1,54 @@ -----BEGIN 
RSA PRIVATE KEY----- -MIIEpAIBAAKCAQEA0wNzfQ9K9WIs9Le6pfiEHdCI97zGZRqOREKb+zFoDllXEEWW -Y5mfXRlTYXbxBiCIV5DDW0yaXlleq62j7/O/6prNUBiYo5sK4Wfj+1UlXLmMikkv -bShm9XzBAXHK20coLJTKOH8QOnFyuVYUvHPCLsoEhQtCHU4zoGaaPmk8w1JU/bfR -+kWmU+x0Ak4rGuRWvMMqg/bu/W/1LmESO5Jsm2CnIyB/22vB08kFn1pO0IIrPQhr -dXqPxddzxc7DuAyyMyzsrLi5msugynEwm1CTBNL9cG45ujNhWzd1rqQe1HF94mEw -RinFe2Ui+vLFpNbh8EesLsy0p18J3QkGQ/0xjQIDAQABAoIBAG4zFTMws2LnPLco -5I8c9FJlv8IxkiLm9Xlsl+1dZgxoV/Glhd7KMdH7EfKKnWEnyEyCy6N4o/22Cpz5 -9+2ujIHHCoZT8hMvO0Vwbj2pv/Nm66Cki+IkAXEfD+TjOp+9z5tIsTtEEIOVD+b+ -r6GjirvIwQeCU6reVlPkgZUqfJM8Drh2vOKzTqalJ7Ud4lsHeOaCvAaLp5RKz6sl -BSX1m3UyRkrOQRFNeD8FwXGgPmxZCbCdLka8em9b2IHFZmfFm+R63NeOWx4wQIF8 -Wh2ncLf3enQU9Xsti1bQ3Q1SALG5Xjf0rtjlk5LklnQ17MuFKLgsIbJ5e9Lg82Bd -DyrwuMECgYEA+orKql23N9x6J2GdpUHIcJ465b8o8DK/HPE880pwkMKzxaKSMhuP -VmOi9lJl0QzxL+cqIKoDxQxr9rzY8/fRa+FHPn2ypJeV7osyDCZKcwJyh8PfHD69 -XIhBXlcr9qLLXj1EqKEMGnCAcbDTiLPsReYfpJ4elrtE9gNstIqqOrkCgYEA15w3 -5Y4aqmjCABQW3SyVah772xYJWmscwxJWTrMuBm4EUoMjx2M2qHf8xLPoay4Q9YTR -v+HtaTw1LAC7tTvuPIGB3Iopj4pyhBWX4m0kxicV7clJFB3eDrl4Dy/70IEJtWxk -xxdEjxwgFhChWOeB4IdcLc/QnGqIZq9v1WGps3UCgYEA2jU9dAoxRAbToS8EtWTI -5UT6eRT329Pm3ZdCBYdE6N+a4TvJZrjYK16WFUEIin1iJt2HtHE+3UUHVxt9PWcO -T44ilcu2ZmHb5Kn5Cdnjp61vAM3YqOn/1PKbNEZp9OIui8KC5Pd26iQnyl0+r2Y9 -F1fpIV/RtmgeFMJA0DGhN4kCgYB5jvzAsmZ8TwWZb7oYRmuS4hcSxRDO5nbX0D3S -rxiyzVIohwEkB96oCsX6zIer7dPn6FMtQJ5OZ4B+54bsvfYqLMXfZhHaeK7poxb3 -tJs4EI8/ee12pwQr/wseXOo2+plTrOIAkTC5Ep1Qc13UNum1gftJZNHzbeirJBwE -DMTVhQKBgQCN7jA7Y7eyXdm6fQpCn/gOkKb4dGhKPlrNOrRoD2S/L1YBJjreGZPm -NARLPO0H1Wrkg54KSIvf69COjVJeyw8L0HGE2eQQOwS7dAvy8rdj+UvABXgaQCxv -wJsTOZEycAeCyp4wJOLeksMB58bzIXPDOPNlN6buWBVLjm33q7g3Rg== +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,50C44E2D186DDB06 + +rofPrFk6MlImSnj1Gx2qwUl9N45Ao4u6FoqxMvw2F3X/MqnZ/0hvA+7Yh65C13Hb +uTMBfSXDMpaWd3tV+1CA6fEVfMCq1Z894Chqvw6ygHzIMlCwDvar4jUEYOjOsRQE +9wzngqGSd+uVz93im6NSFvt2LJ77qKHE8R/REX5c7fr2eq4rgZU/58q2RxvXXnWV +rQGnAPoRIeyRStMKaV5gytIxME+88XDx5jr86a4wr4wraw7mbRlJG1ERWqNfSCNP +tzBPo5LXZMQIvMs3qieclUWYH4vL3gKBcOVmKlsUB/0ov3vC/nrCE3BmvgsL9SFj +ZUCrGsVH+nsRj6mzHSJtUQVSXHvoRGVh8erBCt5H8KmMyv8F8aiUERWeOX63Erk9 +2x4g9trewa6CCXXB+sKKCwqP06HNajjPo5WnTEPPag6WE06O3//yduagcR1QD3Os +Xw0pyZQxf2/4nX4jLNipHCyuYkgcDCarRG4/AgTTI5+cv3CS7C73HwVqz8v5Dh5P +I0ZCdqeK7R5wSlF58HzVGfv9lyjvUohSiQrCDQBhjVrnTjXR0kc0lE0GaoMnOsPR +X0UAIxRnpM43q9A/7z+VEEV4CtzWAt6dIwqFaHr8iVHROy4UUcoC7aMa8rVq4PX7 +YX+vBInOiY+APK/mF22Ao3Ho7uehDTIeBfbF1ED6ugTJvhCIiObsCTh79SDEH1PC +zvVVWl3SO8Std5KnuoQMEqi3kv2I3mBlGjWshjVz856OudX57KTcEAkrJhUD4SFb +cwwDdxY0I/jTD0/oE3VL/d1qhh8mt6oj74z8a68BL7B7xo9mgPuVO5z0ZblizulN +6S+vyb5q0DJIpPDkI0IEx1eEiNiM+WkTHG28SCZR/sq3TGH0B5o/qQzp9O2JlwLB +KiZ8qAbzsYJo0VAIAYBuWdM+pKXcrIIoqa278Af4nmwoSkBzyx3wIGZLWYrzdGd5 +bx+S8OXdDp7YQraFvtJmNC5KqVY8wsZr77oXyQua5iRANh/5AWKbQWdawkOj34KT +DFDsuLIYCrpInor2JoSMBagq1PSh8FzlzvY+Tsof3U1LpP4cVQYtgeSu+0TPhqFP +L8PYaqeLfPGB3Yt+hPBMtFA21jgv+BSGRSqwQYfcpvaO5+Dy8OAKRlzwc9rRZY4I +XBU56WtRvJHUaGPFiAizgR2w0//SPRtVXNIyfS3GjkjnYofx8agXVKhFyL31fCUv +qVvJ3L1MPynp0voOLObiMIskU5vQLY7UWRzb/wbGknfurAV8xQhnJgcL4+VeoCBO +JPjd8yz7FWwdEXhMyOB8GZNzIGDxY3hsfDR1uNwx45264e2tjcW/ZPUoC1lTO/v5 +U6eYk9VZybvJaeb/NNDQLUCwV3T4TEFSvDSgld/GFZNvGBqZYwuAhBKbkrO1f66l +6q4fZgG07uBobOsuvY7Adr1Y4iz09Yjw2F2G5SJ+liZ4rijeN0+fkvV0vwp7GRiP +2WmQIYvdWSek4EDJaEQyUJxLTskveyHLRDV2bo+QhSuh4bB6pCzNoS8Kco6mr1Tg +hx47UrsW/w/KKx90Yl/NUjCMoX77QeHLKcDNreJ6xqGNfQDmcTt962uHm68ZwwFe +qnWxqCQ9stXNh7M0AuG47hAOPEBZzpMRN6ek8akOuZKo3YzdYVneWofav53OwMMU +N1Aj2ktX/IIvYiYbhF9RfiwYJ/UfVOjaiBtfAWgLdY7ZxQWQWL2Ld1Zf4SJ0orAM 
+ffy3rFAn8A0HpFZOhpOi/IciwOcGvWtJOmLIIxEpnpYY6333poo9oiN3+FYKhUlL +E94Mc7CLnI8Z5TlMdqdE9EFQLUQRjeUxMWwqd8o2s2BNI6vtgIwWTJvnOOQ2YuaZ +hFzGrIef+6Nz2Za2jORj7KyfKDNEcc9YXugBFefXUmU8TkstDpM3BNPk9qjBX4// +D3Nz/8hFugujw/KPb+hFy2knB9jZrmqgDykj8Pf8KA3iWWc/hEr1uv/QveJcJzjp +OSOx7fYjJ9giZFDcbcc3oFFseo1IVzfUFGc5rb3y0KqZd7XS8eKPiw5X9NTZIYaM +ynos8jsYEH65zDdilAMpqlA0Daa+2Az7661Gk7mMIsNyt1eanyYbVJswqOu2IzFd +CjmaDQCeTqAoxOzLHoYxusM3vX3CkQg/e1ZNjqbCNB/RtPOj83rWbf8+Iujkzah7 +80v0B3wwXXlZQCk6Wq8nM1vK2W9k37GwPCalibMD+McvOrgblWwUKn9snWLSXym0 +d+QPjobJzH8DezMWbE+LgaFB7O2F4cC18WRPGZLUa+Wc9bQJODbxrpfvFE8NcR7r +PjqXHmcSxbIAJdPzcclKSuibSzhh6QwcZrckoMSH6gWVWJ0lfz/UQZAeCU0MkboR +hpOT24urjQ3yr37ScsuZPfPQhaYr9ML/AWJ9bhotoucCcs7KGrGOc4uXo4thuZG7 +cBO6ZBDHIiEx3njGQVpCxNOij5gmNYetAQARkmBVJiaU2i7gCR6sQB8EkHOjHrGW +5iNbXsNOKGXQzRFe7tA2DOSV7Lsy0Gbr6/xf3NA4oDzKU06gQmT46CJ3wHWTuZ1P +pUn5mJNBJFcqLP+XPfQ/bSzqu81TYCm2/ZTmJw9POgdejYglpeGgHzKuUZef4bJA +8l9pEa2suE89+Ag4WyPb9izox3qObVUahDuulgBDXLkOJxBpr3oN0JKFGUxjvG5Y +A5DEq1Ya7i64cNmAc4S52cHmnBsEzfDZscv4yL5Cp3XbBTIbE5G0bi/0FxfJQ2gU +T3TtNxVkFxtvoSEl2yNt5aq1hZZlNOVla65w8Rcmv7IHZC4GT4PaOuB3IBrr0Kc0 +BZFJEj8rRuNiegvdnXNcbMNH7nXgnQYhZTZH8wNI35VntByOmrjc4GXgpdK144aj +1KwQ/klmJuvmJR+3FI0BauIFJvbe6kcsUlCvI4E5QApVMMUUwt3bH4aek+7MjqPS +1dB0L0a95MV4jQUaRVIAwXeQinkefIj1wYpIKnCL34Lc5EF/QVd8xNF2uL4JsLxD +ptjilmER7nuBvtgJd3uLmGHpzB8oKMVf5CK1bsWqGPUTffpUM2fz0/gMa1/aOt1l +q2OEDs6k5OScHHYSHs10ObRyD6yehNmFCIVSSgz2gu9h9D8xHshKR7OMmFxFkAlf -----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index c3d31ae10e925..f7fab029c8803 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; +import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; @@ -50,6 +51,7 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.process.autodetect.UpdateParams; @@ -170,6 +172,22 @@ public JobState getJobState(String jobId) { return MlMetadata.getJobState(jobId, tasks); } + /** + * Validate the char filter/tokenizer/token filter names used in the categorization analyzer config (if any). + * This validation has to be done server-side; it cannot be done in a client as that won't have loaded the + * appropriate analysis modules/plugins. + * The overall structure can be validated at parse time, but the exact names need to be checked separately, + * as plugins that provide the functionality can be installed/uninstalled. 
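+ *
+ * For example (illustrative sketch; "some_plugin_tokenizer" is a hypothetical
+ * plugin-provided name), a config that parses cleanly on any client can still fail here:
+ * <pre>{@code
+ * CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder()
+ *     .setTokenizer("some_plugin_tokenizer");
+ * // on a node where no installed plugin registers that tokenizer this throws
+ * // IllegalArgumentException("Failed to find global tokenizer under [some_plugin_tokenizer]")
+ * CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment);
+ * }</pre>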
+ */ + static void validateCategorizationAnalyzer(Job.Builder jobBuilder, AnalysisRegistry analysisRegistry, Environment environment) + throws IOException { + CategorizationAnalyzerConfig categorizationAnalyzerConfig = jobBuilder.getAnalysisConfig().getCategorizationAnalyzerConfig(); + if (categorizationAnalyzerConfig != null) { + CategorizationAnalyzer.verifyConfigBuilder(new CategorizationAnalyzerConfig.Builder(categorizationAnalyzerConfig), + analysisRegistry, environment); + } + } + /** * Stores a job in the cluster state */ @@ -177,7 +195,7 @@ public void putJob(PutJobAction.Request request, AnalysisRegistry analysisRegist ActionListener actionListener) throws IOException { request.getJobBuilder().validateAnalysisLimitsAndSetDefaults(maxModelMemoryLimit); - request.getJobBuilder().validateCategorizationAnalyzer(analysisRegistry, environment); + validateCategorizationAnalyzer(request.getJobBuilder(), analysisRegistry, environment); Job job = request.getJobBuilder().build(new Date()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java index a0101b999d5dc..6111fa139f97f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java @@ -9,9 +9,21 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.CustomAnalyzerProvider; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import java.io.Closeable; @@ -19,11 +31,16 @@ import java.util.ArrayList; import java.util.List; - /** * The categorization analyzer. * * Converts messages to lists of tokens that will be fed to the ML categorization algorithm. + * + * The code in {@link #makeAnalyzer} and the methods it calls is largely copied from {@link TransportAnalyzeAction}. + * Unfortunately there is no easy way to reuse a subset of the _analyze action implementation, as the + * logic required here is not quite identical to that of {@link TransportAnalyzeAction}, and the required code is + * hard to partially reuse. + * TODO: consider refactoring ES core to allow more reuse. 
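+ *
+ * Typical usage, as a sketch (assumes an {@code AnalysisRegistry}, {@code Environment}
+ * and {@code CategorizationAnalyzerConfig} are in scope; try-with-resources matters
+ * because newly created custom analyzers must be released after use):
+ * <pre>{@code
+ * try (CategorizationAnalyzer analyzer =
+ *          new CategorizationAnalyzer(analysisRegistry, environment, config)) {
+ *     List<String> tokens = analyzer.tokenizeField("message", "Node stopped unexpectedly");
+ *     // these tokens are what gets fed to the ML categorization algorithm
+ * }
+ * }</pre>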
*/ public class CategorizationAnalyzer implements Closeable { @@ -33,7 +50,7 @@ public class CategorizationAnalyzer implements Closeable { public CategorizationAnalyzer(AnalysisRegistry analysisRegistry, Environment environment, CategorizationAnalyzerConfig categorizationAnalyzerConfig) throws IOException { - Tuple tuple = categorizationAnalyzerConfig.toAnalyzer(analysisRegistry, environment); + Tuple tuple = makeAnalyzer(categorizationAnalyzerConfig, analysisRegistry, environment); analyzer = tuple.v1(); closeAnalyzer = tuple.v2(); } @@ -69,4 +86,193 @@ public List tokenizeField(String fieldName, String fieldValue) { } return tokens; } + + /** + * Verify that the config builder will build a valid config. This is not done as part of the basic build + * because it verifies that the names of analyzers/tokenizers/filters referenced by the config are + * known, and the validity of these names could change over time. Additionally, it has to be done + * server-side rather than client-side, as the client will not have loaded the appropriate analysis + * modules/plugins. + */ + public static void verifyConfigBuilder(CategorizationAnalyzerConfig.Builder configBuilder, AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { + Tuple tuple = makeAnalyzer(configBuilder.build(), analysisRegistry, environment); + if (tuple.v2()) { + tuple.v1().close(); + } + } + + /** + * Convert a config to an {@link Analyzer}. This may be a global analyzer or a newly created custom analyzer. + * In the case of a global analyzer the caller must NOT close it when they have finished with it. In the case of + * a newly created custom analyzer the caller is responsible for closing it. + * @return The first tuple member is the {@link Analyzer}; the second indicates whether the caller is responsible + * for closing it. + */ + private static Tuple makeAnalyzer(CategorizationAnalyzerConfig config, AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { + String analyzer = config.getAnalyzer(); + if (analyzer != null) { + Analyzer globalAnalyzer = analysisRegistry.getAnalyzer(analyzer); + if (globalAnalyzer == null) { + throw new IllegalArgumentException("Failed to find global analyzer [" + analyzer + "]"); + } + return new Tuple<>(globalAnalyzer, Boolean.FALSE); + } else { + List charFilterFactoryList = parseCharFilterFactories(config, analysisRegistry, environment); + + Tuple tokenizerFactory = parseTokenizerFactory(config, analysisRegistry, environment); + + List tokenFilterFactoryList = parseTokenFilterFactories(config, analysisRegistry, environment, + tokenizerFactory, charFilterFactoryList); + + return new Tuple<>(new CustomAnalyzer(tokenizerFactory.v1(), tokenizerFactory.v2(), + charFilterFactoryList.toArray(new CharFilterFactory[charFilterFactoryList.size()]), + tokenFilterFactoryList.toArray(new TokenFilterFactory[tokenFilterFactoryList.size()])), Boolean.TRUE); + } + } + + + /** + * Get char filter factories for each configured char filter. Each configuration + * element can be the name of an out-of-the-box char filter, or a custom definition. 
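+ *
+ * For instance, in a job's categorization_analyzer the two shapes can sit side by side
+ * (values purely illustrative):
+ * <pre>
+ * "char_filter": [
+ *   "html_strip",
+ *   { "type": "pattern_replace", "pattern": "\\[[0-9]*\\]" }
+ * ]
+ * </pre>
+ * The first entry names a built-in char filter; the second is an inline definition whose
+ * [type] picks the factory, matching the two branches handled below.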
+ */ + private static List parseCharFilterFactories(CategorizationAnalyzerConfig config, AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { + List charFilters = config.getCharFilters(); + final List charFilterFactoryList = new ArrayList<>(); + for (CategorizationAnalyzerConfig.NameOrDefinition charFilter : charFilters) { + final CharFilterFactory charFilterFactory; + if (charFilter.name != null) { + AnalysisModule.AnalysisProvider charFilterFactoryFactory = + analysisRegistry.getCharFilterProvider(charFilter.name); + if (charFilterFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global char filter under [" + charFilter.name + "]"); + } + charFilterFactory = charFilterFactoryFactory.get(environment, charFilter.name); + } else { + String charFilterTypeName = charFilter.definition.get("type"); + if (charFilterTypeName == null) { + throw new IllegalArgumentException("Missing [type] setting for char filter: " + charFilter.definition); + } + AnalysisModule.AnalysisProvider charFilterFactoryFactory = + analysisRegistry.getCharFilterProvider(charFilterTypeName); + if (charFilterFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global char filter under [" + charFilterTypeName + "]"); + } + Settings settings = augmentSettings(charFilter.definition); + // Need to set anonymous "name" of char_filter + charFilterFactory = charFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, "_anonymous_charfilter", + settings); + } + if (charFilterFactory == null) { + throw new IllegalArgumentException("Failed to find char filter [" + charFilter + "]"); + } + charFilterFactoryList.add(charFilterFactory); + } + return charFilterFactoryList; + } + + /** + * Get the tokenizer factory for the configured tokenizer. The configuration + * can be the name of an out-of-the-box tokenizer, or a custom definition. + */ + private static Tuple parseTokenizerFactory(CategorizationAnalyzerConfig config, + AnalysisRegistry analysisRegistry, Environment environment) + throws IOException { + CategorizationAnalyzerConfig.NameOrDefinition tokenizer = config.getTokenizer(); + final String name; + final TokenizerFactory tokenizerFactory; + if (tokenizer.name != null) { + name = tokenizer.name; + AnalysisModule.AnalysisProvider tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(name); + if (tokenizerFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global tokenizer under [" + name + "]"); + } + tokenizerFactory = tokenizerFactoryFactory.get(environment, name); + } else { + String tokenizerTypeName = tokenizer.definition.get("type"); + if (tokenizerTypeName == null) { + throw new IllegalArgumentException("Missing [type] setting for tokenizer: " + tokenizer.definition); + } + AnalysisModule.AnalysisProvider tokenizerFactoryFactory = + analysisRegistry.getTokenizerProvider(tokenizerTypeName); + if (tokenizerFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global tokenizer under [" + tokenizerTypeName + "]"); + } + Settings settings = augmentSettings(tokenizer.definition); + // Need to set anonymous "name" of tokenizer + name = "_anonymous_tokenizer"; + tokenizerFactory = tokenizerFactoryFactory.get(buildDummyIndexSettings(settings), environment, name, settings); + } + return new Tuple<>(name, tokenizerFactory); + } + + /** + * Get token filter factories for each configured token filter. 
+
+    /**
+     * Get token filter factories for each configured token filter. Each configuration
+     * element can be the name of an out-of-the-box token filter, or a custom definition.
+     */
+    private static List<TokenFilterFactory> parseTokenFilterFactories(CategorizationAnalyzerConfig config,
+                                                                      AnalysisRegistry analysisRegistry, Environment environment,
+                                                                      Tuple<String, TokenizerFactory> tokenizerFactory,
+                                                                      List<CharFilterFactory> charFilterFactoryList) throws IOException {
+        List<CategorizationAnalyzerConfig.NameOrDefinition> tokenFilters = config.getTokenFilters();
+        final List<TokenFilterFactory> tokenFilterFactoryList = new ArrayList<>();
+        for (CategorizationAnalyzerConfig.NameOrDefinition tokenFilter : tokenFilters) {
+            TokenFilterFactory tokenFilterFactory;
+            if (tokenFilter.name != null) {
+                AnalysisModule.AnalysisProvider<TokenFilterFactory> tokenFilterFactoryFactory;
+                tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name);
+                if (tokenFilterFactoryFactory == null) {
+                    throw new IllegalArgumentException("Failed to find global token filter under [" + tokenFilter.name + "]");
+                }
+                tokenFilterFactory = tokenFilterFactoryFactory.get(environment, tokenFilter.name);
+            } else {
+                String filterTypeName = tokenFilter.definition.get("type");
+                if (filterTypeName == null) {
+                    throw new IllegalArgumentException("Missing [type] setting for token filter: " + tokenFilter.definition);
+                }
+                AnalysisModule.AnalysisProvider<TokenFilterFactory> tokenFilterFactoryFactory =
+                    analysisRegistry.getTokenFilterProvider(filterTypeName);
+                if (tokenFilterFactoryFactory == null) {
+                    throw new IllegalArgumentException("Failed to find global token filter under [" + filterTypeName + "]");
+                }
+                Settings settings = augmentSettings(tokenFilter.definition);
+                // Need to set anonymous "name" of token_filter
+                tokenFilterFactory = tokenFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, "_anonymous_tokenfilter",
+                    settings);
+                tokenFilterFactory = CustomAnalyzerProvider.checkAndApplySynonymFilter(tokenFilterFactory, tokenizerFactory.v1(),
+                    tokenizerFactory.v2(), tokenFilterFactoryList, charFilterFactoryList, environment);
+            }
+            if (tokenFilterFactory == null) {
+                throw new IllegalArgumentException("Failed to find or create token filter [" + tokenFilter + "]");
+            }
+            tokenFilterFactoryList.add(tokenFilterFactory);
+        }
+        return tokenFilterFactoryList;
+    }
+
+    /**
+     * The Elasticsearch analysis functionality is designed to work with indices. For
+     * categorization we have to pretend we've got some index settings.
+     */
+    private static IndexSettings buildDummyIndexSettings(Settings settings) {
+        IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build();
+        return new IndexSettings(metaData, Settings.EMPTY);
+    }
+
+    /**
+     * The behaviour of Elasticsearch analyzers can vary between versions.
+     * For categorization we'll always use the latest version of the text analysis.
+     * The other settings are just to stop classes that expect to be associated with
+     * an index from complaining.
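+     *
+     * A sketch of how the parse* methods above combine this with {@code buildDummyIndexSettings} when an
+     * anonymous component is built from a custom definition:
+     * <pre>{@code
+     * Settings settings = augmentSettings(charFilter.definition);      // pin the analysis version, add dummy index values
+     * IndexSettings indexSettings = buildDummyIndexSettings(settings); // pretend we have an index
+     * CharFilterFactory factory =
+     *     charFilterFactoryFactory.get(indexSettings, environment, "_anonymous_charfilter", settings);
+     * }</pre>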
+ */ + private static Settings augmentSettings(Settings settings) { + return Settings.builder().put(settings) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .build(); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java index 40fee1f40f138..95cba4f2dccb5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java @@ -20,7 +20,7 @@ public class MlClassicTokenizerFactory extends AbstractTokenizerFactory { public MlClassicTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java index 9f03952165c13..59413f6a61879 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java @@ -22,7 +22,6 @@ import java.util.HashMap; import java.util.Map; - public class CategorizationAnalyzerTests extends ESTestCase { private AnalysisRegistry analysisRegistry; @@ -41,6 +40,158 @@ public void setup() throws Exception { analysisRegistry = buildTestAnalysisRegistry(environment); } + public void testVerifyConfigBuilder_GivenNoConfig() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenDefault() throws IOException { + CategorizationAnalyzerConfig defaultConfig = CategorizationAnalyzerConfig.buildDefaultCategorizationAnalyzer(null); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(defaultConfig); + CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment); + } + + public void testVerifyConfigBuilder_GivenValidAnalyzer() throws IOException { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("standard"); + CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment); + } + + public void testVerifyConfigBuilder_GivenInvalidAnalyzer() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("does not exist"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global analyzer [does not exist]", e.getMessage()); + } + + public void 
testVerifyConfigBuilder_GivenValidCustomConfig() throws IOException { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); + ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); + ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter(ignoreStuffThatBeginsWithADigit) + .addTokenFilter("snowball"); + CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithInvalidCharFilter() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter("wrong!") + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global char filter under [wrong!]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithMisconfiguredCharFilter() { + Map noPattern = new HashMap<>(); + noPattern.put("type", "pattern_replace"); + noPattern.put("attern", "should have been pattern"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(noPattern) + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("pattern is missing for [_anonymous_charfilter] char filter of type 'pattern_replace'", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithInvalidTokenizer() { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .setTokenizer("oops!") + .addTokenFilter("lowercase") + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global tokenizer under [oops!]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenNoTokenizer() { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); + ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); + ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .addTokenFilter("lowercase") + .addTokenFilter(ignoreStuffThatBeginsWithADigit) + .addTokenFilter("snowball"); + IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithInvalidTokenFilter() { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter("oh dear!"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global token filter under [oh dear!]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithMisconfiguredTokenFilter() { + Map noPattern = new HashMap<>(); + noPattern.put("type", "pattern_replace"); + noPattern.put("attern", "should have been pattern"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter("html_strip") + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter(noPattern); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("pattern is missing for [_anonymous_tokenfilter] token filter of type 'pattern_replace'", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenAnalyzerAndCharFilter() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .setAnalyzer("standard") + .addCharFilter("html_strip"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [char_filter] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenAnalyzerAndTokenizer() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .setAnalyzer("standard") + .setTokenizer("classic"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [tokenizer] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenAnalyzerAndTokenFilter() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .setAnalyzer("standard") + .addTokenFilter("lowercase"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [filter] field", e.getMessage()); + } + // The default categorization analyzer matches what the analyzer in the ML C++ does public void testDefaultCategorizationAnalyzer() throws IOException { CategorizationAnalyzerConfig defaultConfig = 
CategorizationAnalyzerConfig.buildDefaultCategorizationAnalyzer(null); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java index 9c725fe76292a..2fe2c0b334c4e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java @@ -6,175 +6,17 @@ package org.elasticsearch.xpack.ml.job.config; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import org.elasticsearch.xpack.core.ml.MlParserType; -import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests; -import org.junit.Before; import java.io.IOException; import java.util.HashMap; import java.util.Map; - public class CategorizationAnalyzerConfigTests extends AbstractSerializingTestCase { - private AnalysisRegistry analysisRegistry; - private Environment environment; - - @Before - public void setup() throws Exception { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); - environment = TestEnvironment.newEnvironment(settings); - analysisRegistry = CategorizationAnalyzerTests.buildTestAnalysisRegistry(environment); - } - - public void testVerify_GivenNoConfig() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); - } - - public void testVerify_GivenDefault() throws IOException { - CategorizationAnalyzerConfig defaultConfig = CategorizationAnalyzerConfig.buildDefaultCategorizationAnalyzer(null); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(defaultConfig); - builder.verify(analysisRegistry, environment); - } - - public void testVerify_GivenValidAnalyzer() throws IOException { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("standard"); - builder.verify(analysisRegistry, environment); - } - - public void testVerify_GivenInvalidAnalyzer() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("does not exist"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global analyzer [does not exist]", e.getMessage()); - } - - public void testVerify_GivenValidCustomConfig() throws IOException { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); - ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); - 
ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter(ignoreStuffThatBeginsWithADigit) - .addTokenFilter("snowball"); - builder.verify(analysisRegistry, environment); - } - - public void testVerify_GivenCustomConfigWithInvalidCharFilter() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter("wrong!") - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global char filter under [wrong!]", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithMisconfiguredCharFilter() { - Map noPattern = new HashMap<>(); - noPattern.put("type", "pattern_replace"); - noPattern.put("attern", "should have been pattern"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(noPattern) - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("pattern is missing for [_anonymous_charfilter] char filter of type 'pattern_replace'", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithInvalidTokenizer() { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .setTokenizer("oops!") - .addTokenFilter("lowercase") - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global tokenizer under [oops!]", e.getMessage()); - } - - public void testVerify_GivenNoTokenizer() { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); - ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); - ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .addTokenFilter("lowercase") - .addTokenFilter(ignoreStuffThatBeginsWithADigit) - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithInvalidTokenFilter() { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - 
.setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter("oh dear!"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global token filter under [oh dear!]", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithMisconfiguredTokenFilter() { - Map noPattern = new HashMap<>(); - noPattern.put("type", "pattern_replace"); - noPattern.put("attern", "should have been pattern"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter("html_strip") - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter(noPattern); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("pattern is missing for [_anonymous_tokenfilter] token filter of type 'pattern_replace'", e.getMessage()); - } - - public void testVerify_GivenAnalyzerAndCharFilter() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .setAnalyzer("standard") - .addCharFilter("html_strip"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [char_filter] field", e.getMessage()); - } - - public void testVerify_GivenAnalyzerAndTokenizer() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .setAnalyzer("standard") - .setTokenizer("classic"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [tokenizer] field", e.getMessage()); - } - - public void testVerify_GivenAnalyzerAndTokenFilter() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .setAnalyzer("standard") - .addTokenFilter("lowercase"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [filter] field", e.getMessage()); - } - @Override protected CategorizationAnalyzerConfig createTestInstance() { return createRandomized().build(); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index 93c74ddf3aa5a..08618b45cf51f 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -39,6 +39,8 @@ import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.xpack.core.XPackClient; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; @@ -47,8 +49,6 @@ import 
org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.monitoring.cleaner.CleanerService; import org.elasticsearch.xpack.monitoring.exporter.ClusterAlertsUtil; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index 1711c0e34eb1e..c7d29451ab3be 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -425,7 +425,8 @@ private QueryBuilder createBoundaryQuery(Map position) { assert lowerBound <= maxBoundary; final RangeQueryBuilder query = new RangeQueryBuilder(fieldName) .gte(lowerBound) - .lt(maxBoundary); + .lt(maxBoundary) + .format("epoch_millis"); return query; } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index bf4f4892ef627..f658fa574eb99 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -29,6 +29,8 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexSettings; @@ -506,6 +508,7 @@ private void executeTestCase(List> docs, RollupJobConfig con private Map createFieldTypes(RollupJobConfig job) { Map fieldTypes = new HashMap<>(); MappedFieldType fieldType = new DateFieldMapper.Builder(job.getGroupConfig().getDateHisto().getField()) + .dateTimeFormatter(Joda.forPattern(randomFrom("basic_date", "date_optional_time", "epoch_second"))) .build(new Mapper.BuilderContext(settings.getSettings(), new ContentPath(0))) .fieldType(); fieldTypes.put(fieldType.name(), fieldType); @@ -618,7 +621,7 @@ protected void doNextSearch(SearchRequest request, ActionListener> getSettings(boolean transportClientMode, List getRestHandlers(Settings settings, RestController restC @Override public Map getProcessors(Processor.Parameters parameters) { - Map processors = new HashMap<>(); - processors.put(SetSecurityUserProcessor.TYPE, new SetSecurityUserProcessor.Factory(parameters.threadContext)); - processors.put(HashProcessor.TYPE, new HashProcessor.Factory(parameters.env.settings())); - return processors; + return Collections.singletonMap(SetSecurityUserProcessor.TYPE, new SetSecurityUserProcessor.Factory(parameters.threadContext)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/HashProcessor.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/HashProcessor.java deleted file mode 100644 index fa49b843847ee..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/HashProcessor.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.ingest; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.SecureSetting; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; -import org.elasticsearch.xpack.core.security.SecurityField; - -import javax.crypto.Mac; -import javax.crypto.SecretKeyFactory; -import javax.crypto.spec.PBEKeySpec; -import javax.crypto.spec.SecretKeySpec; -import java.nio.charset.StandardCharsets; -import java.security.InvalidKeyException; -import java.security.NoSuchAlgorithmException; -import java.security.spec.InvalidKeySpecException; -import java.util.Arrays; -import java.util.Base64; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; - -/** - * A processor that hashes the contents of a field (or fields) using various hashing algorithms - */ -public final class HashProcessor extends AbstractProcessor { - public static final String TYPE = "hash"; - public static final Setting.AffixSetting HMAC_KEY_SETTING = SecureSetting - .affixKeySetting(SecurityField.setting("ingest." 
+ TYPE) + ".", "key", - (key) -> SecureSetting.secureString(key, null)); - - private final List fields; - private final String targetField; - private final Method method; - private final Mac mac; - private final byte[] salt; - private final boolean ignoreMissing; - - HashProcessor(String tag, List fields, String targetField, byte[] salt, Method method, @Nullable Mac mac, - boolean ignoreMissing) { - super(tag); - this.fields = fields; - this.targetField = targetField; - this.method = method; - this.mac = mac; - this.salt = salt; - this.ignoreMissing = ignoreMissing; - } - - List getFields() { - return fields; - } - - String getTargetField() { - return targetField; - } - - byte[] getSalt() { - return salt; - } - - @Override - public void execute(IngestDocument document) { - Map hashedFieldValues = fields.stream().map(f -> { - String value = document.getFieldValue(f, String.class, ignoreMissing); - if (value == null && ignoreMissing) { - return new Tuple(null, null); - } - try { - return new Tuple<>(f, method.hash(mac, salt, value)); - } catch (Exception e) { - throw new IllegalArgumentException("field[" + f + "] could not be hashed", e); - } - }).filter(tuple -> Objects.nonNull(tuple.v1())).collect(Collectors.toMap(Tuple::v1, Tuple::v2)); - if (fields.size() == 1) { - document.setFieldValue(targetField, hashedFieldValues.values().iterator().next()); - } else { - document.setFieldValue(targetField, hashedFieldValues); - } - } - - @Override - public String getType() { - return TYPE; - } - - public static final class Factory implements Processor.Factory { - - private final Settings settings; - private final Map secureKeys; - - public Factory(Settings settings) { - this.settings = settings; - this.secureKeys = new HashMap<>(); - HMAC_KEY_SETTING.getAllConcreteSettings(settings).forEach(k -> { - secureKeys.put(k.getKey(), k.get(settings)); - }); - } - - private static Mac createMac(Method method, SecureString password, byte[] salt, int iterations) { - try { - SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2With" + method.getAlgorithm()); - PBEKeySpec keySpec = new PBEKeySpec(password.getChars(), salt, iterations, 128); - byte[] pbkdf2 = secretKeyFactory.generateSecret(keySpec).getEncoded(); - Mac mac = Mac.getInstance(method.getAlgorithm()); - mac.init(new SecretKeySpec(pbkdf2, method.getAlgorithm())); - return mac; - } catch (NoSuchAlgorithmException | InvalidKeySpecException | InvalidKeyException e) { - throw new IllegalArgumentException("invalid settings", e); - } - } - - @Override - public HashProcessor create(Map registry, String processorTag, Map config) { - boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); - List fields = ConfigurationUtils.readList(TYPE, processorTag, config, "fields"); - if (fields.isEmpty()) { - throw ConfigurationUtils.newConfigurationException(TYPE, processorTag, "fields", "must specify at least one field"); - } else if (fields.stream().anyMatch(Strings::isNullOrEmpty)) { - throw ConfigurationUtils.newConfigurationException(TYPE, processorTag, "fields", - "a field-name entry is either empty or null"); - } - String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field"); - String keySettingName = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "key_setting"); - SecureString key = secureKeys.get(keySettingName); - if (key == null) { - throw ConfigurationUtils.newConfigurationException(TYPE, processorTag, "key_setting", - 
"key [" + keySettingName + "] must match [xpack.security.ingest.hash.*.key]. It is not set"); - } - String saltString = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "salt"); - byte[] salt = saltString.getBytes(StandardCharsets.UTF_8); - String methodProperty = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "method", "SHA256"); - Method method = Method.fromString(processorTag, "method", methodProperty); - int iterations = ConfigurationUtils.readIntProperty(TYPE, processorTag, config, "iterations", 5); - Mac mac = createMac(method, key, salt, iterations); - return new HashProcessor(processorTag, fields, targetField, salt, method, mac, ignoreMissing); - } - } - - enum Method { - SHA1("HmacSHA1"), - SHA256("HmacSHA256"), - SHA384("HmacSHA384"), - SHA512("HmacSHA512"); - - private final String algorithm; - - Method(String algorithm) { - this.algorithm = algorithm; - } - - public String getAlgorithm() { - return algorithm; - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - - public String hash(Mac mac, byte[] salt, String input) { - try { - byte[] encrypted = mac.doFinal(input.getBytes(StandardCharsets.UTF_8)); - byte[] messageWithSalt = new byte[salt.length + encrypted.length]; - System.arraycopy(salt, 0, messageWithSalt, 0, salt.length); - System.arraycopy(encrypted, 0, messageWithSalt, salt.length, encrypted.length); - return Base64.getEncoder().encodeToString(messageWithSalt); - } catch (IllegalStateException e) { - throw new ElasticsearchException("error hashing data", e); - } - } - - public static Method fromString(String processorTag, String propertyName, String type) { - try { - return Method.valueOf(type.toUpperCase(Locale.ROOT)); - } catch(IllegalArgumentException e) { - throw newConfigurationException(TYPE, processorTag, propertyName, "type [" + type + - "] not supported, cannot convert field. Valid hash methods: " + Arrays.toString(Method.values())); - } - } - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java index 382230684c77f..fa7791689aaa1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java @@ -349,7 +349,10 @@ private void handshake() throws SSLException { if (hasFlushPending() == false) { handshakeStatus = wrap(EMPTY_BUFFER_ARRAY).getHandshakeStatus(); } - continueHandshaking = false; + // If we need NEED_TASK we should run the tasks immediately + if (handshakeStatus != SSLEngineResult.HandshakeStatus.NEED_TASK) { + continueHandshaking = false; + } break; case NEED_TASK: runTasks(); @@ -432,8 +435,16 @@ private void runTasks() { } private void maybeFinishHandshake() { - // We only acknowledge that we are done handshaking if there are no bytes that need to be written - if (hasFlushPending() == false) { + if (engine.isOutboundDone() || engine.isInboundDone()) { + // If the engine is partially closed, immediate transition to close mode. + if (currentMode.isHandshake()) { + currentMode = new CloseMode(true); + } else { + String message = "Expected to be in handshaking mode. 
Instead in non-handshaking mode: " + currentMode;
+                throw new AssertionError(message);
+            }
+        } else if (hasFlushPending() == false) {
+            // We only acknowledge that we are done handshaking if there are no bytes that need to be written
             if (currentMode.isHandshake()) {
                 currentMode = new ApplicationMode();
             } else {
@@ -510,7 +521,7 @@ private CloseMode(boolean isHandshaking) {
             if (isHandshaking && engine.isInboundDone() == false) {
                 // If we attempt to close during a handshake either we are sending an alert and inbound
                 // should already be closed or we are sending a close_notify. If we send a close_notify
-                // the peer will send an handshake error alert. If we attempt to receive the handshake alert,
+                // the peer might send a handshake error alert. If we attempt to receive the handshake alert,
                 // the engine will throw an IllegalStateException as it is not in a proper state to receive
                 // handshake message. Closing inbound immediately after close_notify is the cleanest option.
                 needToReceiveClose = false;
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java
index d7d8c2ceb1be2..dd41bc5a7fdb7 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java
@@ -30,8 +30,10 @@
 import org.elasticsearch.join.ParentJoinPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
+import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
@@ -161,10 +163,12 @@ public Settings nodeSettings(int nodeOrdinal) {
                 .build();
     }
 
-    public void testQuery() throws Exception {
-        assertAcked(client().admin().indices().prepareCreate("test")
-                .addMapping("type1", "field1", "type=text", "field2", "type=text", "field3", "type=text")
-        );
+    public void testQuery() {
+        assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1",
+            "field1", "type=text",
+            "field2", "type=text",
+            "field3", "type=text",
+            "alias", "type=alias,path=field1"));
         client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2", "field3", "value3")
                 .setRefreshPolicy(IMMEDIATE)
                 .get();
@@ -299,6 +303,20 @@ public void testQuery() throws Exception {
                 .setQuery(matchQuery("field3", "value3"))
                 .get();
         assertHitCount(response, 0);
+
+        // user1 has access to field1, so a query on its field alias should match the document:
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("test")
+            .setQuery(matchQuery("alias", "value1"))
+            .get();
+        assertHitCount(response, 1);
+        // user2 has no access to field1, so a query on its field alias should not match the document:
+        response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)))
+            .prepareSearch("test")
+            .setQuery(matchQuery("alias", "value1"))
+            .get();
+        assertHitCount(response, 0);
     }
 
     public void testGetApi() throws Exception {
@@ -793,10 +811,11 @@ public void testRequestCache() throws Exception {
     }
 
     public void testFields() throws Exception {
-        assertAcked(client().admin().indices().prepareCreate("test")
-                .addMapping("type1", "field1", "type=text,store=true", "field2", "type=text,store=true",
-                        "field3", "type=text,store=true")
-        );
+        assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1",
+            "field1", "type=text,store=true",
+            "field2", "type=text,store=true",
+            "field3", "type=text,store=true",
+            "alias", "type=alias,path=field1"));
         client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2", "field3", "value3")
                 .setRefreshPolicy(IMMEDIATE)
                 .get();
@@ -888,6 +907,22 @@ public void testFields() throws Exception {
         assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2));
         assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1"));
         assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2"));
+
+        // user1 is granted access to field1 only, and so should be able to load it by alias:
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("test")
+            .addStoredField("alias")
+            .get();
+        assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1));
+        assertThat(response.getHits().getAt(0).getFields().get("alias").getValue(), equalTo("value1"));
+
+        // user2 is not granted access to field1, and so should not be able to load it by alias:
+        response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)))
+            .prepareSearch("test")
+            .addStoredField("alias")
+            .get();
+        assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0));
     }
 
     public void testSource() throws Exception {
@@ -963,11 +998,11 @@ public void testSource() throws Exception {
         assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2"));
     }
 
-    public void testSort() throws Exception {
-        assertAcked(client().admin().indices().prepareCreate("test")
-                .addMapping("type1", "field1", "type=long", "field2", "type=long")
-        );
-
+    public void testSort() {
+        assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1",
+            "field1", "type=long",
+            "field2", "type=long",
+            "alias", "type=alias,path=field1"));
         client().prepareIndex("test", "type1", "1").setSource("field1", 1d, "field2", 2d)
                 .setRefreshPolicy(IMMEDIATE)
                 .get();
@@ -1000,12 +1035,81 @@
                 .addSort("field2", SortOrder.ASC)
                 .get();
         assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(2L));
+
+        // user1 is granted access to field1, so its value is included in the sort values when using its alias:
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("test")
+            .addSort("alias", SortOrder.ASC)
+            .get();
+        assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(1L));
+
+        // user2 is not granted access to field1, so the default missing sort value is used for its alias:
+        response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)))
+            .prepareSearch("test")
+            .addSort("alias", SortOrder.ASC)
+            .get();
+        assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(Long.MAX_VALUE));
     }
 
-    public void testAggs() throws Exception {
-        assertAcked(client().admin().indices().prepareCreate("test")
-                .addMapping("type1", "field1", "type=text,fielddata=true", "field2", "type=text,fielddata=true")
-        );
+    public void testHighlighting() {
+        assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1",
+            "field1", "type=text",
+            "field2", "type=text",
+            "field3", "type=text",
+            "alias", "type=alias,path=field1"));
+        client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2", "field3", "value3")
+            .setRefreshPolicy(IMMEDIATE)
+            .get();
+
+        // user1 has access to field1, so the highlight should be visible:
+        SearchResponse response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("test")
+            .setQuery(matchQuery("field1", "value1"))
+            .highlighter(new HighlightBuilder().field("field1"))
+            .get();
+        assertHitCount(response, 1);
+        SearchHit hit = response.getHits().iterator().next();
+        assertEquals(hit.getHighlightFields().size(), 1);
+
+        // user2 has no access to field1, so the highlight should not be visible:
+        response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)))
+            .prepareSearch("test")
+            .setQuery(matchQuery("field2", "value2"))
+            .highlighter(new HighlightBuilder().field("field1"))
+            .get();
+        assertHitCount(response, 1);
+        hit = response.getHits().iterator().next();
+        assertEquals(hit.getHighlightFields().size(), 0);
+
+        // user1 has access to field1, so the highlight on its alias should be visible:
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("test")
+            .setQuery(matchQuery("field1", "value1"))
+            .highlighter(new HighlightBuilder().field("alias"))
+            .get();
+        assertHitCount(response, 1);
+        hit = response.getHits().iterator().next();
+        assertEquals(hit.getHighlightFields().size(), 1);
+
+        // user2 has no access to field1, so the highlight on its alias should not be visible:
+        response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)))
+            .prepareSearch("test")
+            .setQuery(matchQuery("field2", "value2"))
+            .highlighter(new HighlightBuilder().field("alias"))
+            .get();
+        assertHitCount(response, 1);
+        hit = response.getHits().iterator().next();
+        assertEquals(hit.getHighlightFields().size(), 0);
+    }
+
+    public void testAggs() {
+        assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1",
+            "field1", "type=text,fielddata=true",
+            "field2", "type=text,fielddata=true",
+            "alias", "type=alias,path=field1"));
         client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2")
                 .setRefreshPolicy(IMMEDIATE)
                 .get();
@@ -1038,6 +1142,21 @@
                 .addAggregation(AggregationBuilders.terms("_name").field("field2"))
                 .get();
         assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value2").getDocCount(), equalTo(1L));
+
+        // user1 is authorized to use field1, so buckets are included for a term agg on its alias:
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("test")
+            .addAggregation(AggregationBuilders.terms("_name").field("alias"))
+            .get();
+        assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1").getDocCount(), equalTo(1L));
+
+        // user2 is not authorized to use field1, so no buckets are included for a term agg on its alias:
+        response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)))
+            .prepareSearch("test")
+            .addAggregation(AggregationBuilders.terms("_name").field("alias"))
+            .get();
+        assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1"), nullValue());
     }
 
     public void testTVApi() throws Exception {
@@ -1218,15 +1337,22 @@ public void testMTVApi() throws Exception {
 
     public void testParentChild() throws Exception {
         XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
-                .startObject("properties")
-                    .startObject("join_field")
-                        .field("type", "join")
-                        .startObject("relations")
-                            .field("parent", "child")
-                        .endObject()
-                    .endObject()
-                .endObject()
-                .endObject();
+            .startObject("properties")
+                .startObject("field1")
+                    .field("type", "keyword")
+                .endObject()
+                .startObject("alias")
+                    .field("type", "alias")
+                    .field("path", "field1")
+                .endObject()
+                .startObject("join_field")
+                    .field("type", "join")
+                    .startObject("relations")
+                        .field("parent", "child")
+                    .endObject()
+                .endObject()
+            .endObject()
+            .endObject();
         assertAcked(prepareCreate("test")
                 .addMapping("doc", mapping));
         ensureGreen();
@@ -1264,6 +1390,23 @@ private void verifyParentChild() {
                 .setQuery(hasChildQuery("child", termQuery("field1", "yellow"), ScoreMode.None))
                 .get();
         assertHitCount(searchResponse, 0L);
+
+        // Perform the same checks, but using an alias for field1.
+        searchResponse = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("test")
+            .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None))
+            .get();
+        assertHitCount(searchResponse, 1L);
+        assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
+        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
+
+        searchResponse = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)))
+            .prepareSearch("test")
+            .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None))
+            .get();
+        assertHitCount(searchResponse, 0L);
     }
 
     public void testUpdateApiIsBlocked() throws Exception {
@@ -1315,10 +1458,9 @@ public void testUpdateApiIsBlocked() throws Exception {
         assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field2").toString(), equalTo("value3"));
     }
 
-    public void testQuery_withRoleWithFieldWildcards() throws Exception {
+    public void testQuery_withRoleWithFieldWildcards() {
         assertAcked(client().admin().indices().prepareCreate("test")
-                .addMapping("type1", "field1", "type=text", "field2", "type=text")
-        );
+                .addMapping("type1", "field1", "type=text", "field2", "type=text"));
         client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2")
                 .setRefreshPolicy(IMMEDIATE)
                 .get();
@@ -1345,9 +1487,12 @@ public void testQuery_withRoleWithFieldWildcards() throws Exception {
     }
 
     public void testExistQuery() {
-        assertAcked(client().admin().indices().prepareCreate("test")
-                .addMapping("type1", "field1", "type=text", "field2", "type=text", "field3", "type=text")
-        );
+        assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1",
+            "field1", "type=text",
+            "field2", "type=text",
+            "field3", "type=text",
+            "alias", "type=alias,path=field1"));
+        client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2", "field3", "value3")
"field2", "value2", "field3", "value3") .setRefreshPolicy(IMMEDIATE) .get(); @@ -1402,6 +1547,20 @@ public void testExistQuery() { .setQuery(existsQuery("field2")) .get(); assertHitCount(response, 0); + + // user1 has access to field1, so a query on its alias should match with the document: + response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(existsQuery("alias")) + .get(); + assertHitCount(response, 1); + // user2 has no access to field1, so the query should not match with the document: + response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(existsQuery("alias")) + .get(); + assertHitCount(response, 0); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index df1456c3790b2..56d5fec3f20d6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -42,6 +42,7 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; import static org.apache.lucene.util.LuceneTestCase.createTempFile; +import static org.elasticsearch.test.ESTestCase.inFipsJvm; import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile; @@ -103,7 +104,12 @@ public SecuritySettingsSource(int numOfNodes, boolean sslEnabled, Path parentFol this.subfolderPrefix = scope.name(); this.sslEnabled = sslEnabled; this.hostnameVerificationEnabled = randomBoolean(); - this.usePEM = randomBoolean(); + // Use PEM instead of JKS stores so that we can run these in a FIPS 140 JVM + if (inFipsJvm()) { + this.usePEM = true; + } else { + this.usePEM = randomBoolean(); + } } Path nodePath(final int nodeOrdinal) { @@ -137,7 +143,7 @@ public Settings nodeSettings(int nodeOrdinal) { .put("xpack.security.authc.realms.file.type", FileRealmSettings.TYPE) .put("xpack.security.authc.realms.file.order", 0) .put("xpack.security.authc.realms.index.type", NativeRealmSettings.TYPE) - .put("xpack.security.authc.realms.index.order", "1"); + .put("xpack.security.authc.realms.index.order", "1"); addNodeSSLSettings(builder); return builder.build(); } @@ -288,6 +294,22 @@ private static void addSSLSettingsForStore(Settings.Builder builder, String pref } } + /** + * Returns the SSL related configuration settings given the location of a key and certificate and the location + * of the PEM certificates to be trusted + * + * @param keyPath The path to the Private key to be used for SSL + * @param password The password with which the private key is protected + * @param certificatePath The path to the PEM formatted Certificate encapsulating the public key that corresponds + * to the Private Key specified in {@code keyPath}. Will be presented to incoming + * SSL connections. + * @param trustedCertificates A list of PEM formatted certificates that will be trusted. 
+ */ + public static void addSSLSettingsForPEMFiles(Settings.Builder builder, String keyPath, String password, + String certificatePath, List trustedCertificates) { + addSSLSettingsForPEMFiles(builder, "", keyPath, password, certificatePath, trustedCertificates, true, true, true); + } + private static void addSSLSettingsForPEMFiles(Settings.Builder builder, String prefix, String keyPath, String password, String certificatePath, List trustedCertificates, boolean sslEnabled, boolean hostnameVerificationEnabled, boolean transportClient) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java index 6447be7e69cbe..1886dd4249b14 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java @@ -60,13 +60,16 @@ public void testFiltering() throws Exception { // pki filtering configureUnfilteredSetting("xpack.security.authc.realms.pki1.type", "pki"); configureUnfilteredSetting("xpack.security.authc.realms.pki1.order", "0"); - configureFilteredSetting("xpack.security.authc.realms.pki1.truststore.path", + if (inFipsJvm() == false) { + configureFilteredSetting("xpack.security.authc.realms.pki1.truststore.path", getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks").toString()); + configureFilteredSetting("xpack.ssl.keystore.path", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks").toString()); + } configureSecureSetting("xpack.security.authc.realms.pki1.truststore.secure_password", "truststore-testnode-only"); configureFilteredSetting("xpack.security.authc.realms.pki1.truststore.algorithm", "SunX509"); - configureFilteredSetting("xpack.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks").toString()); + configureFilteredSetting("xpack.ssl.cipher_suites", Strings.arrayToCommaDelimitedString(XPackSettings.DEFAULT_CIPHERS.toArray())); configureFilteredSetting("xpack.ssl.supported_protocols", randomFrom("TLSv1", "TLSv1.1", "TLSv1.2")); @@ -78,8 +81,10 @@ public void testFiltering() throws Exception { // client profile configureUnfilteredSetting("transport.profiles.client.port", "9500-9600"); - configureFilteredSetting("transport.profiles.client.xpack.security.ssl.keystore.path", + if (inFipsJvm() == false) { + configureFilteredSetting("transport.profiles.client.xpack.security.ssl.keystore.path", getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks").toString()); + } configureFilteredSetting("transport.profiles.client.xpack.security.ssl.cipher_suites", Strings.arrayToCommaDelimitedString(XPackSettings.DEFAULT_CIPHERS.toArray())); configureFilteredSetting("transport.profiles.client.xpack.security.ssl.supported_protocols", diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java index f9b1be65736e1..6966b7edf67d8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java @@ -100,16 +100,18 @@ public void testBootstrapCheckWithDisabledRealm() throws 
Exception { public void testBootstrapCheckWithClosedSecuredSetting() throws Exception { final boolean expectFail = randomBoolean(); final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.security.http.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.security.http.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.security.authc.realms.test_pki.type", PkiRealmSettings.TYPE) - .put("xpack.security.http.ssl.enabled", true) - .put("xpack.security.http.ssl.client_authentication", expectFail ? "none" : "optional") - .put("xpack.security.http.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.security.authc.realms.test_pki.type", PkiRealmSettings.TYPE) + .put("xpack.security.http.ssl.enabled", true) + .put("xpack.security.http.ssl.client_authentication", expectFail ? "none" : "optional") + .put("xpack.security.http.ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put("xpack.security.http.ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); Environment env = TestEnvironment.newEnvironment(settings); final PkiRealmBootstrapCheck check = new PkiRealmBootstrapCheck(new SSLService(settings, env)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java index 14b0a58419a22..212ee7ea499ec 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java @@ -20,6 +20,7 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Path; +import java.util.Arrays; import java.util.HashSet; import java.util.Set; @@ -92,8 +93,12 @@ public void testRetrieveUsers() throws Exception { Settings.Builder builder = Settings.builder() .put("path.home", home) .put("path.conf", conf.toString()); - SecuritySettingsSource.addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + SecuritySettingsSource.addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); Settings settings = builder.build(); logger.error("--> retrieving users using URL: {}, home: {}", url, home); @@ -134,8 +139,11 @@ public void testRetrieveRoles() throws Exception { String url = getHttpURL(); ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles(); Settings.Builder builder = Settings.builder().put("path.home", home); - SecuritySettingsSource.addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient"); + SecuritySettingsSource.addSSLSettingsForPEMFiles(builder, + 
"/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", + "testclient", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); Settings settings = builder.build(); logger.error("--> retrieving roles using URL: {}, home: {}", url, home); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java index 0d689adcdf594..9b8c3878a038d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java @@ -49,20 +49,23 @@ public void shutdown() throws Exception { } public void testCommandLineHttpClientCanExecuteAndReturnCorrectResultUsingSSLSettings() throws Exception { - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); MockSecureSettings secureSettings = new MockSecureSettings(); Settings settings; if (randomBoolean()) { // with http ssl settings secureSettings.setString("xpack.security.http.ssl.truststore.secure_password", "testnode"); - settings = Settings.builder().put("xpack.security.http.ssl.truststore.path", resource.toString()) + settings = Settings.builder().put("xpack.security.http.ssl.certificate_authorities", certPath.toString()) .put("xpack.security.http.ssl.verification_mode", VerificationMode.CERTIFICATE).setSecureSettings(secureSettings) .build(); } else { // with global settings secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); - settings = Settings.builder().put("xpack.ssl.truststore.path", resource.toString()) - .put("xpack.ssl.verification_mode", VerificationMode.CERTIFICATE).setSecureSettings(secureSettings).build(); + settings = Settings.builder() + .put("xpack.ssl.certificate_authorities", certPath.toString()) + .put("xpack.ssl.verification_mode", VerificationMode.CERTIFICATE) + .setSecureSettings(secureSettings) + .build(); } CommandLineHttpClient client = new CommandLineHttpClient(settings, environment); HttpResponse httpResponse = client.execute("GET", new URL("https://localhost:" + webServer.getPort() + "/test"), "u1", @@ -74,11 +77,15 @@ public void testCommandLineHttpClientCanExecuteAndReturnCorrectResultUsingSSLSet } private MockWebServer createMockWebServer() { - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); - Settings settings = - Settings.builder().put("xpack.ssl.keystore.path", resource.toString()).setSecureSettings(secureSettings).build(); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); + Settings settings = Settings.builder() + .put("xpack.ssl.key", keyPath.toString()) + .put("xpack.ssl.certificate", certPath.toString()) + 
.setSecureSettings(secureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings, environment); return new MockWebServer(sslService.sslContext(), false); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java index 26cd513ec78e5..23010e400a52b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java @@ -34,8 +34,8 @@ public abstract class GroupsResolverTestCase extends ESTestCase { @Before public void setUpLdapConnection() throws Exception { - Path truststore = getDataPath(trustPath()); - this.ldapConnection = LdapTestUtils.openConnection(ldapUrl(), bindDN(), bindPassword(), truststore); + Path trustPath = getDataPath(trustPath()); + this.ldapConnection = LdapTestUtils.openConnection(ldapUrl(), bindDN(), bindPassword(), trustPath); } @After diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java index 8bdfd02d2fcc5..966f2e3f5492d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java @@ -67,6 +67,6 @@ public static LDAPConnection openConnection(String url, String bindDN, String bi sslConfiguration = sslService.getSSLConfiguration("xpack.security.authc.realms.foo.ssl"); } return LdapUtils.privilegedConnect(() -> new LDAPConnection(sslService.sslSocketFactory(sslConfiguration), options, - ldapurl.getHost(), ldapurl.getPort(), bindDN, bindPassword)); + ldapurl.getHost(), ldapurl.getPort(), bindDN, bindPassword)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java index 9d8fd1544f5a6..19b0d4e71bb8a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java @@ -54,7 +54,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { @Before public void init() throws Exception { - Path keystore = getDataPath("support/ADtrust.jks"); + Path certPath = getDataPath("support/smb_ca.crt"); Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. 
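The init() rewritten in the next hunk swaps the ADtrust.jks truststore for the smb_ca.crt PEM certificate; schematically, the resulting trust configuration looks like this (a sketch, where certPath is the Path resolved from getDataPath("support/smb_ca.crt") above):

```java
// Sketch: PEM CAs via xpack.ssl.certificate_authorities replace the JKS
// truststore, so no keystore password is needed and FIPS 140 JVMs can run it.
Settings globalSettings = Settings.builder()
        .put("path.home", createTempDir())
        .put("xpack.ssl.certificate_authorities", certPath)
        .build();
SSLService sslService = new SSLService(globalSettings, TestEnvironment.newEnvironment(globalSettings));
```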
@@ -63,10 +63,9 @@ public void init() throws Exception { */ globalSettings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.truststore.path", keystore) - .setSecureSettings(newSecureSettings("xpack.ssl.truststore.secure_password", "changeit")) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.certificate_authorities", certPath) + .build(); sslService = new SSLService(globalSettings, env); threadPool = new TestThreadPool("LdapUserSearchSessionFactoryTests"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java index c458a9c42eabb..931acc1e79dca 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java @@ -26,22 +26,24 @@ import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import org.elasticsearch.xpack.core.ssl.SSLClientAuth; import org.elasticsearch.xpack.security.LocalStateSecurity; -import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.TrustManager; -import java.io.InputStream; import java.net.InetSocketAddress; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.KeyStore; import java.security.SecureRandom; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; import java.util.Locale; +import java.util.stream.Collectors; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; @@ -60,16 +62,16 @@ protected Settings nodeSettings() { SSLClientAuth sslClientAuth = randomBoolean() ? 
SSLClientAuth.REQUIRED : SSLClientAuth.OPTIONAL; Settings.Builder builder = Settings.builder() - .put(super.nodeSettings()) - .put("xpack.security.http.ssl.enabled", true) - .put("xpack.security.http.ssl.client_authentication", sslClientAuth) - .put("xpack.security.authc.realms.file.type", FileRealmSettings.TYPE) - .put("xpack.security.authc.realms.file.order", "0") - .put("xpack.security.authc.realms.pki1.type", PkiRealmSettings.TYPE) - .put("xpack.security.authc.realms.pki1.order", "1") - .put("xpack.security.authc.realms.pki1.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.security.authc.realms.pki1.files.role_mapping", getDataPath("role_mapping.yml")); + .put(super.nodeSettings()) + .put("xpack.security.http.ssl.enabled", true) + .put("xpack.security.http.ssl.client_authentication", sslClientAuth) + .put("xpack.security.authc.realms.file.type", FileRealmSettings.TYPE) + .put("xpack.security.authc.realms.file.order", "0") + .put("xpack.security.authc.realms.pki1.type", PkiRealmSettings.TYPE) + .put("xpack.security.authc.realms.pki1.order", "1") + .put("xpack.security.authc.realms.pki1.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("xpack.security.authc.realms.pki1.files.role_mapping", getDataPath("role_mapping.yml")); SecuritySettingsSource.addSecureSettings(builder, secureSettings -> secureSettings.setString("xpack.security.authc.realms.pki1.truststore.secure_password", "truststore-testnode-only")); @@ -90,7 +92,13 @@ protected boolean enableWarningsCheck() { public void testTransportClientCanAuthenticateViaPki() { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList + ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient client = createTransportClient(builder.build())) { client.addTransportAddress(randomFrom(node().injector().getInstance(Transport.class).boundAddress().boundAddresses())); IndexResponse response = client.prepareIndex("foo", "bar").setSource("pki", "auth").get(); @@ -113,7 +121,11 @@ public void testTransportClientAuthenticationFailure() { } public void testRestAuthenticationViaPki() throws Exception { - SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { @@ -124,7 +136,10 @@ public void testRestAuthenticationViaPki() throws Exception { } public void testRestAuthenticationFailure() throws Exception { - SSLContext context = 
getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient"); + SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", + "testclient", "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { @@ -135,21 +150,13 @@ public void testRestAuthenticationFailure() throws Exception { } } - private SSLContext getRestSSLContext(String keystoreResourcePath, String password) throws Exception { + private SSLContext getRestSSLContext(String keyPath, String password, String certPath, List<String> trustedCertPaths) throws Exception { SSLContext context = SSLContext.getInstance("TLS"); - KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - Path store = getDataPath(keystoreResourcePath); - KeyStore ks; - try (InputStream in = Files.newInputStream(store)) { - ks = KeyStore.getInstance("jks"); - ks.load(in, password.toCharArray()); - } - - kmf.init(ks, password.toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(ks); - context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); - + TrustManager tm = CertParsingUtils.trustManager(CertParsingUtils.readCertificates(trustedCertPaths.stream().map(p -> getDataPath + (p)).collect(Collectors.toList()))); + KeyManager km = CertParsingUtils.keyManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath))), PemUtils.readPrivateKey(getDataPath(keyPath), password::toCharArray), password.toCharArray()); + context.init(new KeyManager[]{km}, new TrustManager[]{tm}, new SecureRandom()); return context; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java index 7f79ae35adac2..5a7015a4e8dfa 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java @@ -128,6 +128,8 @@ public class SamlAuthenticatorTests extends SamlTestCase { @BeforeClass public static void init() throws Exception { + assumeFalse("Can't run in a FIPS JVM, there is no DOM XMLSignature Factory so we can't sign XML documents", inFipsJvm()); + // TODO: Refactor the signing to use org.opensaml.xmlsec.signature.support.Signer so that we can run the tests SamlUtils.initialize(Loggers.getLogger(SamlAuthenticatorTests.class)); // Initialise Apache XML security so that the signDoc methods work correctly.
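Condensed, the getRestSSLContext rewrite above builds its SSLContext straight from PEM material through this patch's CertParsingUtils and PemUtils helpers. A sketch of the same construction, with keyPath, certPath, caPath and password as free placeholders:

```java
// Trust anchors come from PEM CA certificates; key material comes from a
// PEM private key plus its certificate, so no JKS keystore is involved.
TrustManager tm = CertParsingUtils.trustManager(
        CertParsingUtils.readCertificates(Collections.singletonList(getDataPath(caPath))));
KeyManager km = CertParsingUtils.keyManager(
        CertParsingUtils.readCertificates(Collections.singletonList(getDataPath(certPath))),
        PemUtils.readPrivateKey(getDataPath(keyPath), password::toCharArray),
        password.toCharArray());
SSLContext context = SSLContext.getInstance("TLS");
context.init(new KeyManager[]{km}, new TrustManager[]{tm}, new SecureRandom());
```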
Init.init(); @@ -218,7 +220,7 @@ public void testParseContentWithNoAssertionsIsRejected() throws Exception { "" + IDP_ENTITY_ID + "" + "" + - ""); + ""); final ElasticsearchSecurityException exception = expectSamlException(() -> authenticator.authenticate(token)); assertThat(exception.getMessage(), containsString("No assertions found in SAML response")); assertThat(exception.getCause(), nullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java index cf41673b86bce..e239c8706b99f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java @@ -296,6 +296,7 @@ public void testHandleAttributesInBatchMode() throws Exception { } public void testSigningMetadataWithPfx() throws Exception { + assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm()); final Path certPath = getDataPath("saml.crt"); final Path keyPath = getDataPath("saml.key"); final Path p12Path = getDataPath("saml.p12"); @@ -355,6 +356,7 @@ public void testSigningMetadataWithPfx() throws Exception { } public void testSigningMetadataWithPasswordProtectedPfx() throws Exception { + assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm()); final Path certPath = getDataPath("saml.crt"); final Path keyPath = getDataPath("saml.key"); final Path p12Path = getDataPath("saml_with_password.p12"); @@ -393,11 +395,13 @@ public void testSigningMetadataWithPasswordProtectedPfx() throws Exception { public void testErrorSigningMetadataWithWrongPassword() throws Exception { final Path certPath = getDataPath("saml.crt"); final Path keyPath = getDataPath("saml.key"); - final Path p12Path = getDataPath("saml_with_password.p12"); + final Path signingKeyPath = getDataPath("saml_with_password.key"); final SamlMetadataCommand command = new SamlMetadataCommand((e) -> randomFrom(keyStore, null)); final OptionSet options = command.getParser().parse(new String[]{ - "-signing-bundle", p12Path.toString(), - "-signing-key-password", "wrong_password" + "-signing-cert", certPath.toString(), + "-signing-key", signingKeyPath.toString(), + "-signing-key-password", "wrongpassword" + }); final boolean useSigningCredentials = randomBoolean(); @@ -422,7 +426,7 @@ public void testErrorSigningMetadataWithWrongPassword() throws Exception { final UserException userException = expectThrows(UserException.class, () -> command.possiblySignDescriptor(terminal, options, descriptor, env)); assertThat(userException.getMessage(), containsString("Unable to create metadata document")); - assertThat(terminal.getOutput(), containsString("keystore password was incorrect")); + assertThat(terminal.getOutput(), containsString("Error parsing Private Key from")); } public void testSigningMetadataWithPem() throws Exception { @@ -473,7 +477,7 @@ public void testSigningMetadataWithPasswordProtectedPem() throws Exception { final OptionSet options = command.getParser().parse(new String[]{ "-signing-cert", certPath.toString(), "-signing-key", signingKeyPath.toString(), - "-signing-key-password", "saml" + "-signing-key-password", "saml" }); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java index 6dc9c021fc813..980abc46831c6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java @@ -105,13 +105,17 @@ public void testReadIdpMetadataFromHttps() throws Exception { final Path path = getDataPath("idp1.xml"); final String body = new String(Files.readAllBytes(path), StandardCharsets.UTF_8); final MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + mockSecureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); final Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put("path.home", createTempDir()) - .setSecureSettings(mockSecureSettings) - .build(); + .put("xpack.ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put("xpack.ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("path.home", createTempDir()) + .setSecureSettings(mockSecureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings, TestEnvironment.newEnvironment(settings)); try (MockWebServer proxyServer = new MockWebServer(sslService.sslContext(Settings.EMPTY), false)) { proxyServer.start(); @@ -563,17 +567,21 @@ private Tuple buildConfig(String path) throws Exception private Settings.Builder buildSettings(String idpMetaDataPath) { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(REALM_SETTINGS_PREFIX + ".ssl.keystore.secure_password", "testnode"); + secureSettings.setString(REALM_SETTINGS_PREFIX + ".ssl.secure_key_passphrase", "testnode"); return Settings.builder() - .put(REALM_SETTINGS_PREFIX + ".ssl.verification_mode", "certificate") - .put(REALM_SETTINGS_PREFIX + ".ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put(REALM_SETTINGS_PREFIX + ".type", "saml") - .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_METADATA_PATH.getKey(), idpMetaDataPath) - .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_ENTITY_ID.getKey(), TEST_IDP_ENTITY_ID) - .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_METADATA_HTTP_REFRESH.getKey(), METADATA_REFRESH + "ms") - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings); + .put(REALM_SETTINGS_PREFIX + ".ssl.verification_mode", "certificate") + .put(REALM_SETTINGS_PREFIX + ".ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put(REALM_SETTINGS_PREFIX + ".ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put(REALM_SETTINGS_PREFIX + ".ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put(REALM_SETTINGS_PREFIX + ".type", "saml") + .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_METADATA_PATH.getKey(), idpMetaDataPath) + .put(REALM_SETTINGS_PREFIX + "." 
+ SamlRealmSettings.IDP_ENTITY_ID.getKey(), TEST_IDP_ENTITY_ID) + .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_METADATA_HTTP_REFRESH.getKey(), METADATA_REFRESH + "ms") + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings); } private RealmConfig realmConfigFromRealmSettings(Settings realmSettings) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorFactoryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorFactoryTests.java deleted file mode 100644 index e9dda488e7216..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorFactoryTests.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.ingest; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.settings.MockSecureSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; - -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; - -public class HashProcessorFactoryTests extends ESTestCase { - - public void testProcessor() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList("_field")); - config.put("target_field", "_target"); - config.put("salt", "_salt"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - for (HashProcessor.Method method : HashProcessor.Method.values()) { - config.put("method", method.toString()); - HashProcessor processor = factory.create(null, "_tag", new HashMap<>(config)); - assertThat(processor.getFields(), equalTo(Collections.singletonList("_field"))); - assertThat(processor.getTargetField(), equalTo("_target")); - assertArrayEquals(processor.getSalt(), "_salt".getBytes(StandardCharsets.UTF_8)); - } - } - - public void testProcessorNoFields() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("target_field", "_target"); - config.put("salt", "_salt"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - config.put("method", HashProcessor.Method.SHA1.toString()); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(e.getMessage(), equalTo("[fields] required property is missing")); - } - - public void testProcessorNoTargetField() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = 
Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList("_field")); - config.put("salt", "_salt"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - config.put("method", HashProcessor.Method.SHA1.toString()); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(e.getMessage(), equalTo("[target_field] required property is missing")); - } - - public void testProcessorFieldsIsEmpty() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList(randomBoolean() ? "" : null)); - config.put("salt", "_salt"); - config.put("target_field", "_target"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - config.put("method", HashProcessor.Method.SHA1.toString()); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(e.getMessage(), equalTo("[fields] a field-name entry is either empty or null")); - } - - public void testProcessorMissingSalt() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList("_field")); - config.put("target_field", "_target"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(e.getMessage(), equalTo("[salt] required property is missing")); - } - - public void testProcessorInvalidMethod() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList("_field")); - config.put("salt", "_salt"); - config.put("target_field", "_target"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - config.put("method", "invalid"); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(e.getMessage(), equalTo("[method] type [invalid] not supported, cannot convert field. 
" + - "Valid hash methods: [sha1, sha256, sha384, sha512]")); - } - - public void testProcessorInvalidOrMissingKeySetting() { - Settings settings = Settings.builder().setSecureSettings(new MockSecureSettings()).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList("_field")); - config.put("salt", "_salt"); - config.put("target_field", "_target"); - config.put("key_setting", "invalid"); - config.put("method", HashProcessor.Method.SHA1.toString()); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", new HashMap<>(config))); - assertThat(e.getMessage(), - equalTo("[key_setting] key [invalid] must match [xpack.security.ingest.hash.*.key]. It is not set")); - config.remove("key_setting"); - ElasticsearchException ex = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(ex.getMessage(), equalTo("[key_setting] required property is missing")); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorTests.java deleted file mode 100644 index b3890600592f5..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorTests.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.ingest; - -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.security.ingest.HashProcessor.Method; - -import javax.crypto.Mac; -import javax.crypto.SecretKeyFactory; -import javax.crypto.spec.PBEKeySpec; -import javax.crypto.spec.SecretKeySpec; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Base64; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; - -public class HashProcessorTests extends ESTestCase { - - @SuppressWarnings("unchecked") - public void testIgnoreMissing() throws Exception { - Method method = randomFrom(Method.values()); - Mac mac = createMac(method); - Map fields = new HashMap<>(); - fields.put("one", "foo"); - HashProcessor processor = new HashProcessor("_tag", Arrays.asList("one", "two"), - "target", "_salt".getBytes(StandardCharsets.UTF_8), Method.SHA1, mac, true); - IngestDocument ingestDocument = new IngestDocument(fields, new HashMap<>()); - processor.execute(ingestDocument); - Map target = ingestDocument.getFieldValue("target", Map.class); - assertThat(target.size(), equalTo(1)); - assertNotNull(target.get("one")); - - HashProcessor failProcessor = new HashProcessor("_tag", Arrays.asList("one", "two"), - "target", "_salt".getBytes(StandardCharsets.UTF_8), Method.SHA1, mac, false); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> failProcessor.execute(ingestDocument)); - assertThat(exception.getMessage(), equalTo("field [two] not present as part of path [two]")); - } - - public void testStaticKeyAndSalt() throws Exception { - byte[] salt = 
"_salt".getBytes(StandardCharsets.UTF_8); - SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1"); - PBEKeySpec keySpec = new PBEKeySpec("hmackey".toCharArray(), salt, 5, 128); - byte[] pbkdf2 = secretKeyFactory.generateSecret(keySpec).getEncoded(); - Mac mac = Mac.getInstance(Method.SHA1.getAlgorithm()); - mac.init(new SecretKeySpec(pbkdf2, Method.SHA1.getAlgorithm())); - Map fields = new HashMap<>(); - fields.put("field", "0123456789"); - HashProcessor processor = new HashProcessor("_tag", Collections.singletonList("field"), - "target", salt, Method.SHA1, mac, false); - IngestDocument ingestDocument = new IngestDocument(fields, new HashMap<>()); - processor.execute(ingestDocument); - assertThat(ingestDocument.getFieldValue("target", String.class), equalTo("X3NhbHQMW0oHJGEEE9obGcGv5tGd7HFyDw==")); - } - - public void testProcessorSingleField() throws Exception { - List fields = Collections.singletonList(randomAlphaOfLength(6)); - Map docFields = new HashMap<>(); - for (String field : fields) { - docFields.put(field, randomAlphaOfLengthBetween(2, 10)); - } - - String targetField = randomAlphaOfLength(6); - Method method = randomFrom(Method.values()); - Mac mac = createMac(method); - byte[] salt = randomByteArrayOfLength(5); - HashProcessor processor = new HashProcessor("_tag", fields, targetField, salt, method, mac, false); - IngestDocument ingestDocument = new IngestDocument(docFields, new HashMap<>()); - processor.execute(ingestDocument); - - String targetFieldValue = ingestDocument.getFieldValue(targetField, String.class); - Object expectedTargetFieldValue = method.hash(mac, salt, ingestDocument.getFieldValue(fields.get(0), String.class)); - assertThat(targetFieldValue, equalTo(expectedTargetFieldValue)); - byte[] bytes = Base64.getDecoder().decode(targetFieldValue); - byte[] actualSaltPrefix = new byte[salt.length]; - System.arraycopy(bytes, 0, actualSaltPrefix, 0, salt.length); - assertArrayEquals(salt, actualSaltPrefix); - } - - @SuppressWarnings("unchecked") - public void testProcessorMultipleFields() throws Exception { - List fields = new ArrayList<>(); - for (int i = 0; i < randomIntBetween(2, 10); i++) { - fields.add(randomAlphaOfLength(5 + i)); - } - Map docFields = new HashMap<>(); - for (String field : fields) { - docFields.put(field, randomAlphaOfLengthBetween(2, 10)); - } - - String targetField = randomAlphaOfLength(6); - Method method = randomFrom(Method.values()); - Mac mac = createMac(method); - byte[] salt = randomByteArrayOfLength(5); - HashProcessor processor = new HashProcessor("_tag", fields, targetField, salt, method, mac, false); - IngestDocument ingestDocument = new IngestDocument(docFields, new HashMap<>()); - processor.execute(ingestDocument); - - Map targetFieldMap = ingestDocument.getFieldValue(targetField, Map.class); - for (Map.Entry entry : targetFieldMap.entrySet()) { - Object expectedTargetFieldValue = method.hash(mac, salt, ingestDocument.getFieldValue(entry.getKey(), String.class)); - assertThat(entry.getValue(), equalTo(expectedTargetFieldValue)); - byte[] bytes = Base64.getDecoder().decode(entry.getValue()); - byte[] actualSaltPrefix = new byte[salt.length]; - System.arraycopy(bytes, 0, actualSaltPrefix, 0, salt.length); - assertArrayEquals(salt, actualSaltPrefix); - } - } - - private Mac createMac(Method method) throws Exception { - char[] password = randomAlphaOfLengthBetween(1, 10).toCharArray(); - byte[] salt = randomAlphaOfLength(5).getBytes(StandardCharsets.UTF_8); - int iterations = randomIntBetween(1, 10); - 
SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2With" + method.getAlgorithm()); - PBEKeySpec keySpec = new PBEKeySpec(password, salt, iterations, 128); - byte[] pbkdf2 = secretKeyFactory.generateSecret(keySpec).getEncoded(); - Mac mac = Mac.getInstance(method.getAlgorithm()); - mac.init(new SecretKeySpec(pbkdf2, method.getAlgorithm())); - return mac; - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java index bac5e0b3f5008..abd5768bebec9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java @@ -43,11 +43,10 @@ import java.util.Collection; import java.util.concurrent.CountDownLatch; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase { private static int randomClientPort; @@ -66,25 +65,18 @@ public boolean transportSSLEnabled() { protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder settingsBuilder = Settings.builder(); String randomClientPortRange = randomClientPort + "-" + (randomClientPort+100); - - Path store; - try { - store = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); - assertThat(Files.exists(store), is(true)); - } catch (Exception e) { - throw new RuntimeException(e); - } - + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); settingsBuilder.put(super.nodeSettings(nodeOrdinal)) - .put("transport.profiles.client.xpack.security.ssl.truststore.path", store) // settings for client truststore - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("transport.profiles.client.xpack.security.type", "client") - .put("transport.profiles.client.port", randomClientPortRange) - // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent - .put("transport.profiles.client.bind_host", "localhost") - .put("xpack.security.audit.enabled", false) - .put(XPackSettings.WATCHER_ENABLED.getKey(), false) - .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false); + .putList("transport.profiles.client.xpack.security.ssl.certificate_authorities", + Arrays.asList(certPath.toString())) // settings for client truststore + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("transport.profiles.client.xpack.security.type", "client") + .put("transport.profiles.client.port", randomClientPortRange) + // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent + .put("transport.profiles.client.bind_host", "localhost") + .put("xpack.security.audit.enabled", false) + .put(XPackSettings.WATCHER_ENABLED.getKey(), false) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false); if (randomBoolean()) { settingsBuilder.put("transport.profiles.default.xpack.security.type", "node"); 
// this is default lets set it randomly } @@ -120,7 +112,12 @@ public void testThatConnectionToServerTypeConnectionWorks() throws IOException, //.put("xpack.ml.autodetect_process", false); Collection> mockPlugins = Arrays.asList( LocalStateSecurity.class, TestZenDiscovery.TestPlugin.class, MockHttpTransport.TestPlugin.class); - addSSLSettingsForStore(nodeSettings, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + addSSLSettingsForPEMFiles( + nodeSettings, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (Node node = new MockNode(nodeSettings.build(), mockPlugins)) { node.start(); ensureStableCluster(cluster().size() + 1); @@ -159,7 +156,12 @@ public void testThatConnectionToClientTypeConnectionIsRejected() throws IOExcept //.put("xpack.ml.autodetect_process", false); Collection> mockPlugins = Arrays.asList( LocalStateSecurity.class, TestZenDiscovery.TestPlugin.class, MockHttpTransport.TestPlugin.class); - addSSLSettingsForStore(nodeSettings, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + addSSLSettingsForPEMFiles( + nodeSettings, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (Node node = new MockNode(nodeSettings.build(), mockPlugins)) { node.start(); TransportService instance = node.injector().getInstance(TransportService.class); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java index f03a4255b7fe7..bc674ae1aa00e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java @@ -21,7 +21,8 @@ // TODO delete this test? 
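The IPHostnameVerificationTests conversion that follows keeps verification_mode at full while moving from the IP-only JKS keystore to its PEM equivalents. The node-side settings boil down to the following sketch, where keyPath and certPath are the fields the class resolves below:

```java
// Full hostname verification against a certificate whose only
// SubjectAlternativeNames are IP addresses, hence the fixed 127.0.0.1 host.
Settings nodeSslSettings = Settings.builder()
        .put("xpack.ssl.key", keyPath.toAbsolutePath())
        .put("xpack.ssl.certificate", certPath.toAbsolutePath())
        .put("xpack.ssl.certificate_authorities", certPath.toAbsolutePath())
        .put("network.host", "127.0.0.1")
        .put("xpack.ssl.verification_mode", "full")
        .build();
```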
public class IPHostnameVerificationTests extends SecurityIntegTestCase { - Path keystore; + private Path certPath; + private Path keyPath; @Override protected boolean transportSSLEnabled() { @@ -46,36 +47,37 @@ protected Settings nodeSettings(int nodeOrdinal) { .putList("discovery.zen.ping.unicast.hosts", newUnicastAddresses); try { - //This keystore uses a cert with a CN of "Elasticsearch Test Node" and IPv4+IPv6 ip addresses as SubjectAlternativeNames - keystore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.jks"); - assertThat(Files.exists(keystore), is(true)); + //Use a cert with a CN of "Elasticsearch Test Node" and IPv4+IPv6 ip addresses as SubjectAlternativeNames + certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.crt"); + keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem"); + assertThat(Files.exists(certPath), is(true)); } catch (Exception e) { throw new RuntimeException(e); } SecuritySettingsSource.addSecureSettings(settingsBuilder, secureSettings -> { - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode-ip-only"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode-ip-only"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode-ip-only"); }); - return settingsBuilder.put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) // settings for client truststore - .put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) // settings for client truststore - .put(TcpTransport.BIND_HOST.getKey(), "127.0.0.1") - .put("network.host", "127.0.0.1") - .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) - .put("xpack.ssl.verification_mode", "full") - .build(); + return settingsBuilder.put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .put("xpack.ssl.certificate_authorities", certPath.toAbsolutePath()) + .put(TcpTransport.BIND_HOST.getKey(), "127.0.0.1") + .put("network.host", "127.0.0.1") + .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) + .put("xpack.ssl.verification_mode", "full") + .build(); } @Override protected Settings transportClientSettings() { Settings clientSettings = super.transportClientSettings(); return Settings.builder().put(clientSettings.filter(k -> k.startsWith("xpack.ssl.") == false)) - .put("xpack.ssl.verification_mode", "certificate") - .put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.keystore.password", "testnode-ip-only") - .put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.truststore.password", "testnode-ip-only") - .build(); + .put("xpack.ssl.verification_mode", "certificate") + .put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .put("xpack.ssl.key_passphrase", "testnode-ip-only") + .put("xpack.ssl.certificate_authorities", certPath) + .build(); } public void testTransportClientConnectionWorksWithIPOnlyHostnameVerification() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index ad64dea79a587..23ca3c1fe9fe4 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -39,17 +39,21 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { private SSLService sslService; private Environment env; - + private Path testnodeCert; + private Path testnodeKey; @Before public void createSSLService() throws Exception { - Path testNodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); + testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testNodeStore) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); } @@ -144,15 +148,11 @@ public void testCustomSSLConfiguration() throws Exception { } public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() throws Exception { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .setSecureSettings(secureSettings) - .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.certificate_authorities", testnodeCert) + .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true) + .put("path.home", createTempDir()) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -163,13 +163,13 @@ public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() throws Except public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java index f87ab36d3d574..e9d91f5bd2d6a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java @@ -41,15 +41,17 @@ public class SecurityNetty4ServerTransportTests extends ESTestCase { @Before public void createSSLService() throws Exception { - Path testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); + Path testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.keystore.path", testnodeStore) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); } @@ -179,17 +181,18 @@ public void testProfileOptionalClientAuth() throws Exception { public void testTransportSSLOverridesGlobalSSL() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testnode"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "truststore-testnode-only"); + secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode"); Settings.Builder builder = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.security.transport.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put("xpack.security.transport.ssl.client_authentication", "none") - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.security.transport.ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put("xpack.security.transport.ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("xpack.security.transport.ssl.client_authentication", "none") + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()); Settings settings = builder.build(); env = TestEnvironment.newEnvironment(settings); sslService = new 
SSLService(settings, env); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java index 148453b5f84b0..c61b5782f75c4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java @@ -19,6 +19,7 @@ import java.net.InetSocketAddress; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Arrays; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; @@ -35,25 +36,33 @@ protected Settings nodeSettings(int nodeOrdinal) { Settings settings = super.nodeSettings(nodeOrdinal); Settings.Builder settingsBuilder = Settings.builder(); settingsBuilder.put(settings.filter(k -> k.startsWith("xpack.ssl.") == false), false); - Path keystore; + Path keyPath; + Path certPath; + Path nodeCertPath; try { /* * This keystore uses a cert without any subject alternative names and a CN of "Elasticsearch Test Node No SAN" * that will not resolve to a DNS name and will always cause hostname verification failures */ - keystore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks"); - assert keystore != null; - assertThat(Files.exists(keystore), is(true)); + keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem"); + certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt"); + nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + assert keyPath != null; + assert certPath != null; + assert nodeCertPath != null; + assertThat(Files.exists(certPath), is(true)); + assertThat(Files.exists(nodeCertPath), is(true)); + assertThat(Files.exists(keyPath), is(true)); } catch (Exception e) { throw new RuntimeException(e); } SecuritySettingsSource.addSecureSettings(settingsBuilder, secureSettings -> { - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode-no-subjaltname"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode-no-subjaltname"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode-no-subjaltname"); }); - return settingsBuilder.put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) + return settingsBuilder.put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(certPath.toString(), nodeCertPath.toString())) // disable hostname verification as this test uses certs without a valid SAN or DNS in the CN .put("xpack.ssl.verification_mode", "certificate") .build(); @@ -61,22 +70,32 @@ protected Settings nodeSettings(int nodeOrdinal) { @Override protected Settings transportClientSettings() { - Path keystore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks"); - assert keystore != null; + Path keyPath; + Path certPath; + Path nodeCertPath; + try { + keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem"); + certPath = 
getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt"); + nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + assert keyPath != null; + assert certPath != null; + assert nodeCertPath != null; + assertThat(Files.exists(certPath), is(true)); + assertThat(Files.exists(nodeCertPath), is(true)); + assertThat(Files.exists(keyPath), is(true)); + } catch (Exception e) { + throw new RuntimeException(e); + } Settings settings = super.transportClientSettings(); // remove all ssl settings Settings.Builder builder = Settings.builder(); builder.put(settings.filter( k -> k.startsWith("xpack.ssl.") == false), false); builder.put("xpack.ssl.verification_mode", "certificate") - .put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) // settings for client keystore - .put("xpack.ssl.keystore.password", "testnode-no-subjaltname"); - - if (randomBoolean()) { - // randomly set the truststore, if not set the keystore should be used - builder.put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.truststore.password", "testnode-no-subjaltname"); - } + .put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.key_passphrase", "testnode-no-subjaltname") + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(certPath.toString(), nodeCertPath.toString())); return builder.build(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java index b98e4e0ce5735..303ed92130aaf 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java @@ -7,21 +7,21 @@ import org.elasticsearch.nio.InboundChannelBuffer; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; -import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLException; -import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.TrustManager; import java.io.IOException; -import java.io.InputStream; import java.nio.Buffer; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.security.KeyStore; import java.security.SecureRandom; import java.util.Arrays; +import java.util.Collections; import java.util.function.Supplier; public class SSLDriverTests extends ESTestCase { @@ -57,8 +57,15 @@ public void testPingPongAndClose() throws Exception { public void testRenegotiate() throws Exception { SSLContext sslContext = getSSLContext(); - SSLDriver clientDriver = getDriver(sslContext.createSSLEngine(), true); - SSLDriver serverDriver = getDriver(sslContext.createSSLEngine(), false); + SSLEngine serverEngine = sslContext.createSSLEngine(); + SSLEngine clientEngine = sslContext.createSSLEngine(); + + String[] serverProtocols = {"TLSv1.2"}; + serverEngine.setEnabledProtocols(serverProtocols); + String[] clientProtocols = {"TLSv1.2"}; + clientEngine.setEnabledProtocols(clientProtocols); + SSLDriver clientDriver = getDriver(clientEngine, true); + SSLDriver serverDriver = getDriver(serverEngine, 
false); handshake(clientDriver, serverDriver); @@ -119,16 +126,27 @@ public void testHandshakeFailureBecauseProtocolMismatch() throws Exception { SSLContext sslContext = getSSLContext(); SSLEngine clientEngine = sslContext.createSSLEngine(); SSLEngine serverEngine = sslContext.createSSLEngine(); - String[] serverProtocols = {"TLSv1.1", "TLSv1.2"}; + String[] serverProtocols = {"TLSv1.2"}; serverEngine.setEnabledProtocols(serverProtocols); - String[] clientProtocols = {"TLSv1"}; + String[] clientProtocols = {"TLSv1.1"}; clientEngine.setEnabledProtocols(clientProtocols); SSLDriver clientDriver = getDriver(clientEngine, true); SSLDriver serverDriver = getDriver(serverEngine, false); SSLException sslException = expectThrows(SSLException.class, () -> handshake(clientDriver, serverDriver)); - assertEquals("Client requested protocol TLSv1 not enabled or not supported", sslException.getMessage()); - failedCloseAlert(serverDriver, clientDriver); + String oldExpected = "Client requested protocol TLSv1.1 not enabled or not supported"; + String jdk11Expected = "Received fatal alert: protocol_version"; + boolean expectedMessage = oldExpected.equals(sslException.getMessage()) || jdk11Expected.equals(sslException.getMessage()); + assertTrue("Unexpected exception message: " + sslException.getMessage(), expectedMessage); + + // In JDK11 we need an non-application write + if (serverDriver.needsNonApplicationWrite()) { + serverDriver.nonApplicationWrite(); + } + // Prior to JDK11 we still need to send a close alert + if (serverDriver.isClosed() == false) { + failedCloseAlert(serverDriver, clientDriver); + } } public void testHandshakeFailureBecauseNoCiphers() throws Exception { @@ -144,11 +162,18 @@ public void testHandshakeFailureBecauseNoCiphers() throws Exception { SSLDriver clientDriver = getDriver(clientEngine, true); SSLDriver serverDriver = getDriver(serverEngine, false); - SSLException sslException = expectThrows(SSLException.class, () -> handshake(clientDriver, serverDriver)); - assertEquals("no cipher suites in common", sslException.getMessage()); - failedCloseAlert(serverDriver, clientDriver); + expectThrows(SSLException.class, () -> handshake(clientDriver, serverDriver)); + // In JDK11 we need an non-application write + if (serverDriver.needsNonApplicationWrite()) { + serverDriver.nonApplicationWrite(); + } + // Prior to JDK11 we still need to send a close alert + if (serverDriver.isClosed() == false) { + failedCloseAlert(serverDriver, clientDriver); + } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32144") public void testCloseDuringHandshake() throws Exception { SSLContext sslContext = getSSLContext(); SSLDriver clientDriver = getDriver(sslContext.createSSLEngine(), true); @@ -205,19 +230,16 @@ private void failedCloseAlert(SSLDriver sendDriver, SSLDriver receiveDriver) thr } private SSLContext getSSLContext() throws Exception { - String relativePath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"; + String certPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"; + String keyPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem"; SSLContext sslContext; - try (InputStream in = Files.newInputStream(getDataPath(relativePath))) { - KeyStore keyStore = KeyStore.getInstance("jks"); - keyStore.load(in, "testclient".toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(keyStore); - KeyManagerFactory kmf = 
KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - kmf.init(keyStore, "testclient".toCharArray()); - sslContext = SSLContext.getInstance("TLSv1.2"); - sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); - return sslContext; - } + TrustManager tm = CertParsingUtils.trustManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath)))); + KeyManager km = CertParsingUtils.keyManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath))), PemUtils.readPrivateKey(getDataPath(keyPath), "testclient"::toCharArray), "testclient".toCharArray()); + sslContext = SSLContext.getInstance("TLSv1.2"); + sslContext.init(new KeyManager[] { km }, new TrustManager[] { tm }, new SecureRandom()); + return sslContext; } private void normalClose(SSLDriver sendDriver, SSLDriver receiveDriver) throws IOException { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index 9f33da7ae88af..feca093e581af 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -55,15 +55,17 @@ public class SimpleSecurityNioTransportTests extends AbstractSimpleTransportTestCase { private SSLService createSSLService() { - Path testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); + Path testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.keystore.path", testnodeStore) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); try { return new SSLService(settings, TestEnvironment.newEnvironment(settings)); } catch (Exception e) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java index 42c5cd7c7ab45..df49103a25999 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.transport.ssl; -import com.unboundid.util.ssl.TrustAllTrustManager; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ 
-77,7 +76,8 @@ public void testConnection() throws Exception { X509ExtendedKeyManager x509ExtendedKeyManager = CertParsingUtils.keyManager(certs, privateKey, new char[0]); SSLContext sslContext = SSLContext.getInstance("TLS"); sslContext.init(new X509ExtendedKeyManager[] { x509ExtendedKeyManager }, - new TrustManager[] { new TrustAllTrustManager(false) }, new SecureRandom()); + new TrustManager[]{CertParsingUtils.trustManager(CertParsingUtils.readCertificates(Collections.singletonList(certPath)))}, + new SecureRandom()); SSLSocketFactory socketFactory = sslContext.getSocketFactory(); NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().setTransport(true).get(); TransportAddress address = randomFrom(response.getNodes()).getTransport().getAddress().publishAddress(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java index fa8fd00aeba61..541e660691275 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java @@ -42,12 +42,13 @@ import java.security.KeyStore; import java.security.SecureRandom; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Set; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -97,6 +98,7 @@ public void testThatUnconfiguredCiphersAreRejected() throws Exception { // no SSL exception as this is the exception is returned when connecting public void testThatTransportClientUsingSSLv3ProtocolIsRejected() { + assumeFalse("Can't run in a FIPS JVM as SSLv3 SSLContext not available", inFipsJvm()); try (TransportClient transportClient = new TestXPackTransportClient(Settings.builder() .put(transportClientSettings()) .put("node.name", "programmatic_transport_client") @@ -116,7 +118,11 @@ public void testThatTransportClientUsingSSLv3ProtocolIsRejected() { public void testThatConnectionToHTTPWorks() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient"); + addSSLSettingsForPEMFiles( + builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", + "testclient", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); SSLService service = new SSLService(builder.build(), null); CredentialsProvider provider = new BasicCredentialsProvider(); @@ -135,6 +141,7 @@ public void testThatConnectionToHTTPWorks() throws Exception { } public void testThatHttpUsingSSLv3IsRejected() throws Exception { + assumeFalse("Can't run in a FIPS JVM as we can't even get an instance of SSL SSL Context", inFipsJvm()); SSLContext sslContext = SSLContext.getInstance("SSL"); TrustManagerFactory factory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); factory.init((KeyStore) 
null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java index 1d7ec67762ba0..d3ab5d092ab5b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java @@ -20,10 +20,11 @@ import java.net.InetAddress; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Arrays; import java.util.Collections; import static org.elasticsearch.test.SecuritySettingsSource.TEST_USER_NAME; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; @@ -42,9 +43,9 @@ public static void getRandomPort() { /** * On each node sets up the following profiles: *
<ul>
- *   <li>default: testnode keystore. Requires client auth</li>
- *   <li>client: testnode-client-profile keystore that only trusts the testclient cert. Requires client auth</li>
- *   <li>no_client_auth: testnode keystore. Does not require client auth</li>
+ *   <li>default: testnode keypair. Requires client auth</li>
+ *   <li>client: testnode-client-profile profile that only trusts the testclient cert. Requires client auth</li>
+ *   <li>no_client_auth: testnode keypair. Does not require client auth</li>
 * </ul>
*/ @Override @@ -52,26 +53,25 @@ protected Settings nodeSettings(int nodeOrdinal) { String randomClientPortRange = randomClientPort + "-" + (randomClientPort+100); String randomNoClientAuthPortRange = randomNoClientAuthPort + "-" + (randomNoClientAuthPort+100); - Path store; + Path trustCert; try { - store = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-client-profile.jks"); - assertThat(Files.exists(store), is(true)); + trustCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt"); + assertThat(Files.exists(trustCert), is(true)); } catch (Exception e) { throw new RuntimeException(e); } Settings settings = Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - // client set up here - .put("transport.profiles.client.port", randomClientPortRange) - // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent - .put("transport.profiles.client.bind_host", "localhost") - .put("transport.profiles.client.xpack.security.ssl.truststore.path", store.toAbsolutePath()) - .put("transport.profiles.client.xpack.security.ssl.truststore.password", "testnode-client-profile") - .put("transport.profiles.no_client_auth.port", randomNoClientAuthPortRange) - .put("transport.profiles.no_client_auth.bind_host", "localhost") - .put("transport.profiles.no_client_auth.xpack.security.ssl.client_authentication", SSLClientAuth.NONE) - .build(); + .put(super.nodeSettings(nodeOrdinal)) + // client set up here + .put("transport.profiles.client.port", randomClientPortRange) + // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent + .put("transport.profiles.client.bind_host", "localhost") + .put("transport.profiles.client.xpack.security.ssl.certificate_authorities", trustCert.toAbsolutePath()) + .put("transport.profiles.no_client_auth.port", randomNoClientAuthPortRange) + .put("transport.profiles.no_client_auth.bind_host", "localhost") + .put("transport.profiles.no_client_auth.xpack.security.ssl.client_authentication", SSLClientAuth.NONE) + .build(); logger.info("node {} settings:\n{}", nodeOrdinal, settings); return settings; } @@ -140,15 +140,18 @@ public void testThatStandardTransportClientCannotConnectToClientProfile() throws } /** - * Uses a transport client with a custom keystore; this keystore testclient-client-profile.jks trusts the testnode + * Uses a transport client with a custom key pair; TransportClient only trusts the testnode * certificate and had its own self signed certificate. 
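For reference, the per-profile trust configuration that replaces the JKS truststore settings in the hunk above reduces to the following sketch. The class and parameter names are illustrative; the `certificate_authorities` key, the `.put(Settings)` overload, and the `testclient-client-profile.crt` fixture are taken from the patch itself.

```java
import org.elasticsearch.common.settings.Settings;

import java.nio.file.Path;

// Sketch: a transport profile now trusts a PEM CA certificate directly via
// certificate_authorities instead of a truststore path plus password.
class ProfileSslSettingsSketch {
    static Settings clientProfile(Settings base, Path trustCert, String portRange) {
        return Settings.builder()
                .put(base)
                .put("transport.profiles.client.port", portRange)
                // bind to "localhost" so the test is consistent across ipv4/ipv6
                .put("transport.profiles.client.bind_host", "localhost")
                .put("transport.profiles.client.xpack.security.ssl.certificate_authorities",
                        trustCert.toAbsolutePath())
                .build();
    }
}
```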
This test connects to the client profile, which is only * set to trust the testclient-client-profile certificate so the connection should always succeed */ public void testThatProfileTransportClientCanConnectToClientProfile() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks", - "testclient-client-profile"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", + "testclient-client-profile", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient transportClient = createTransportClient(builder.build())) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("client"))); assertGreenClusterState(transportClient); @@ -156,16 +159,19 @@ public void testThatProfileTransportClientCanConnectToClientProfile() throws Exc } /** - * Uses a transport client with a custom keystore; this keystore testclient-client-profile.jks trusts the testnode + * Uses a transport client with a custom key pair; TransportClient only trusts the testnode * certificate and had its own self signed certificate. This test connects to the no_client_auth profile, which * uses a truststore that does not trust the testclient-client-profile certificate but does not require client * authentication */ public void testThatProfileTransportClientCanConnectToNoClientAuthProfile() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks", - "testclient-client-profile"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", + "testclient-client-profile", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient transportClient = createTransportClient(builder.build())) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("no_client_auth"))); @@ -174,16 +180,19 @@ public void testThatProfileTransportClientCanConnectToNoClientAuthProfile() thro } /** - * Uses a transport client with a custom keystore; this keystore testclient-client-profile.jks trusts the testnode + * Uses a transport client with a custom key pair; TransportClient only trusts the testnode * certificate and had its own self signed certificate. 
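The `addSSLSettingsForPEMFiles` call shape repeated in these tests is, in isolation, as below. Resource paths and the passphrase are copied from the hunks above; the helper is the test-scoped replacement for `addSSLSettingsForStore` in `SecuritySettingsSource`.

```java
import org.elasticsearch.common.settings.Settings;

import java.util.Arrays;

import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles;

// Sketch of the new helper's argument order:
// (builder, key resource, key passphrase, certificate resource, CA resources).
class PemHelperSketch {
    static Settings profileClientSettings() {
        Settings.Builder builder = Settings.builder();
        addSSLSettingsForPEMFiles(
                builder,
                "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem",
                "testclient-client-profile",
                "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt",
                Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"));
        return builder.build();
    }
}
```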
This test connects to the default profile, which * uses a truststore that does not trust the testclient-client-profile certificate and requires client authentication * so the connection should always fail */ public void testThatProfileTransportClientCannotConnectToDefaultProfile() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks", - "testclient-client-profile"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", + "testclient-client-profile", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient transportClient = createTransportClient(builder.build())) { TransportAddress transportAddress = randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses()); transportClient.addTransportAddress(transportAddress); @@ -253,19 +262,17 @@ public void testThatTransportClientCannotConnectToNoClientAuthProfile() throws E } /** - * Uses a transport client with a custom truststore; this truststore truststore-testnode-only only trusts the testnode - * certificate and contains no other certification. This test connects to the no_client_auth profile, which uses - * the testnode certificate and does not require to present a certificate, so this connection should always succeed + * Uses a transport client that only trusts the testnode certificate. This test connects to the no_client_auth profile, + * which uses the testnode certificate and does not require to present a certificate, so this connection should always succeed */ public void testThatTransportClientWithOnlyTruststoreCanConnectToNoClientAuthProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.ssl.truststore.password", "truststore-testnode-only") - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), @@ -274,21 +281,19 @@ public void testThatTransportClientWithOnlyTruststoreCanConnectToNoClientAuthPro } /** - * Uses a transport client with a custom truststore; this truststore truststore-testnode-only only trusts the testnode - * certificate and contains no other certification. This test connects to the client profile, which uses + * Uses a transport client that only trusts the testnode certificate. 
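The same PEM plumbing underlies the rewritten `getSSLContext` helpers earlier in this patch (SSLDriverTests, SSLClientAuthTests). A minimal standalone sketch, assuming the `CertParsingUtils`/`PemUtils` signatures used there:

```java
import org.elasticsearch.xpack.core.ssl.CertParsingUtils;
import org.elasticsearch.xpack.core.ssl.PemUtils;

import javax.net.ssl.KeyManager;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import java.nio.file.Path;
import java.security.PrivateKey;
import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.util.Collections;

// Sketch: build a TLSv1.2 SSLContext from a PEM key/certificate pair instead of
// loading a JKS store through KeyManagerFactory/TrustManagerFactory.
class PemSslContextSketch {
    static SSLContext build(Path certPath, Path keyPath, String passphrase) throws Exception {
        Certificate[] certs = CertParsingUtils.readCertificates(Collections.singletonList(certPath));
        PrivateKey key = PemUtils.readPrivateKey(keyPath, passphrase::toCharArray);
        TrustManager tm = CertParsingUtils.trustManager(certs);   // trust exactly these certs
        KeyManager km = CertParsingUtils.keyManager(certs, key, passphrase.toCharArray());
        SSLContext ctx = SSLContext.getInstance("TLSv1.2");
        ctx.init(new KeyManager[] { km }, new TrustManager[] { tm }, new SecureRandom());
        return ctx;
    }
}
```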
This test connects to the client profile, which uses * the testnode certificate and requires the client to present a certificate, so this connection will never work as * the client has no certificate to present */ public void testThatTransportClientWithOnlyTruststoreCannotConnectToClientProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.ssl.truststore.password", "truststore-testnode-only") - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("client"))); @@ -300,21 +305,19 @@ public void testThatTransportClientWithOnlyTruststoreCannotConnectToClientProfil } /** - * Uses a transport client with a custom truststore; this truststore truststore-testnode-only only trusts the testnode - * certificate and contains no other certification. This test connects to the default profile, which uses + * Uses a transport client that only trusts the testnode certificate. 
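A trust-only client after this change carries just the `certificate_authorities` setting, with no key or certificate of its own, which is why the profiles requiring client authentication reject it. A sketch, with cluster name and certificate path as parameters:

```java
import org.elasticsearch.common.settings.Settings;

import java.nio.file.Path;

// Trust-only client: it can verify the node's testnode certificate but has no
// certificate of its own to present, so client-auth-required profiles fail.
class TrustOnlyClientSketch {
    static Settings build(String clusterName, Path testnodeCert) {
        return Settings.builder()
                .put("cluster.name", clusterName)
                .put("xpack.security.transport.ssl.enabled", true)
                .put("xpack.ssl.certificate_authorities", testnodeCert)
                .build();
    }
}
```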
This test connects to the default profile, which uses * the testnode certificate and requires the client to present a certificate, so this connection will never work as * the client has no certificate to present */ public void testThatTransportClientWithOnlyTruststoreCannotConnectToDefaultProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.ssl.truststore.password", "truststore-testnode-only") - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); @@ -332,11 +335,11 @@ public void testThatTransportClientWithOnlyTruststoreCannotConnectToDefaultProfi */ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToDefaultProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.security.transport.ssl.enabled", true) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); @@ -354,11 +357,11 @@ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToDefaultProf */ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToClientProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.security.transport.ssl.enabled", true) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new 
TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("client"))); @@ -376,11 +379,11 @@ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToClientProfi */ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToNoClientAuthProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.security.transport.ssl.enabled", true) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java index fb5d567bb3627..7427c5a67e92d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java @@ -10,12 +10,18 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.SecurityIntegTestCase; +import org.junit.BeforeClass; /** * An extremely simple test that shows SSL will work with a cipher that does not perform encryption */ public class SslNullCipherTests extends SecurityIntegTestCase { + @BeforeClass + public static void muteInFips() { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); + } + @Override public boolean transportSSLEnabled() { return true; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java index fdb66916884c7..d205c7cd9334e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java @@ -22,20 +22,22 @@ import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.core.TestXPackTransportClient; import org.elasticsearch.xpack.core.security.SecurityField; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import org.elasticsearch.xpack.core.ssl.SSLClientAuth; import org.elasticsearch.xpack.security.LocalStateSecurity; -import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.TrustManager; import java.io.IOException; -import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; -import java.security.KeyStore; import java.security.SecureRandom; import java.security.cert.CertPathBuilderException; +import java.util.Arrays; +import java.util.Collections; import static 
org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; @@ -74,7 +76,11 @@ public void testThatHttpFailsWithoutSslClientAuth() throws IOException { } catch (IOException e) { Throwable t = ExceptionsHelper.unwrap(e, CertPathBuilderException.class); assertThat(t, instanceOf(CertPathBuilderException.class)); - assertThat(t.getMessage(), containsString("unable to find valid certification path to requested target")); + if (inFipsJvm()) { + assertThat(t.getMessage(), containsString("Unable to find certificate chain")); + } else { + assertThat(t.getMessage(), containsString("unable to find valid certification path to requested target")); + } } } @@ -89,24 +95,27 @@ public void testThatHttpWorksWithSslClientAuth() throws IOException { } public void testThatTransportWorksWithoutSslClientAuth() throws IOException { - // specify an arbitrary keystore, that does not include the certs needed to connect to the transport protocol - Path store = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks"); + // specify an arbitrary key and certificate - not the certs needed to connect to the transport protocol + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt"); + Path nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - if (Files.notExists(store)) { - throw new ElasticsearchException("store path doesn't exist"); + if (Files.notExists(keyPath) || Files.notExists(certPath)) { + throw new ElasticsearchException("key or certificate path doesn't exist"); } MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testclient-client-profile"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testclient-client-profile"); Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) - .put("xpack.ssl.keystore.path", store) - .setSecureSettings(secureSettings) - .put("cluster.name", internalCluster().getClusterName()) - .put(SecurityField.USER_SETTING.getKey(), - transportClientUsername() + ":" + new String(transportClientPassword().getChars())) - .build(); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .put("xpack.ssl.certificate_authorities", nodeCertPath) + .setSecureSettings(secureSettings) + .put("cluster.name", internalCluster().getClusterName()) + .put(SecurityField.USER_SETTING.getKey(), transportClientUsername() + ":" + new String(transportClientPassword().getChars())) + .build(); try (TransportClient client = new TestXPackTransportClient(settings, LocalStateSecurity.class)) { Transport transport = internalCluster().getDataNodeInstance(Transport.class); TransportAddress transportAddress = transport.boundAddress().publishAddress(); @@ -117,19 +126,19 @@ public void testThatTransportWorksWithoutSslClientAuth() throws IOException { } private SSLContext getSSLContext() { - try (InputStream in = - Files.newInputStream(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"))) { - 
KeyStore keyStore = KeyStore.getInstance("jks"); - keyStore.load(in, "testclient".toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(keyStore); - KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - kmf.init(keyStore, "testclient".toCharArray()); + try { + String certPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"; + String nodeCertPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"; + String keyPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem"; + TrustManager tm = CertParsingUtils.trustManager(CertParsingUtils.readCertificates(Arrays.asList(getDataPath + (certPath), getDataPath(nodeCertPath)))); + KeyManager km = CertParsingUtils.keyManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath))), PemUtils.readPrivateKey(getDataPath(keyPath), "testclient"::toCharArray), "testclient".toCharArray()); SSLContext context = SSLContext.getInstance("TLSv1.2"); - context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); + context.init(new KeyManager[] { km }, new TrustManager[] { tm }, new SecureRandom()); return context; } catch (Exception e) { - throw new ElasticsearchException("failed to initialize a TrustManagerFactory", e); + throw new ElasticsearchException("failed to initialize SSLContext", e); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java index 4269d8a78eb7a..03f963cc59ca6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java @@ -12,9 +12,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.transport.Transport; -import org.elasticsearch.xpack.core.ssl.CertParsingUtils; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -23,16 +21,12 @@ import javax.net.ssl.SSLSocketFactory; import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; import java.net.SocketException; import java.nio.file.AtomicMoveNotSupportedException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; -import java.security.KeyStore; -import java.security.cert.X509Certificate; -import java.util.Collections; +import java.util.Arrays; import java.util.concurrent.CountDownLatch; import static org.hamcrest.Matchers.containsString; @@ -43,34 +37,51 @@ */ public class SSLReloadIntegTests extends SecurityIntegTestCase { - private Path nodeStorePath; + private Path nodeKeyPath; + private Path nodeCertPath; + private Path clientCertPath; + private Path updateableCertPath; @Override public Settings nodeSettings(int nodeOrdinal) { - //Node starts with testnode.jks - if (nodeStorePath == null) { - Path origPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); - Path tempDir = createTempDir(); - nodeStorePath = tempDir.resolve("testnode.jks"); - try { - Files.copy(origPath, nodeStorePath); - } catch 
(IOException e) { - throw new ElasticsearchException("failed to copy keystore"); + // Nodes start trusting testnode.crt and testclient.crt + Path origKeyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + Path origCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path origClientCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"); + Path tempDir = createTempDir(); + try { + if (nodeKeyPath == null) { + nodeKeyPath = tempDir.resolve("testnode.pem"); + Files.copy(origKeyPath, nodeKeyPath); + } + if (nodeCertPath == null) { + nodeCertPath = tempDir.resolve("testnode.crt"); + Files.copy(origCertPath, nodeCertPath); + } + if (clientCertPath == null) { + clientCertPath = tempDir.resolve("testclient.crt"); + Files.copy(origClientCertPath, clientCertPath); } + // Placeholder trusted certificate that will be updated later on + if (updateableCertPath == null) { + updateableCertPath = tempDir.resolve("updateable.crt"); + Files.copy(origCertPath, updateableCertPath); + } + } catch (IOException e) { + throw new ElasticsearchException("failed to copy key or certificate", e); } + Settings settings = super.nodeSettings(nodeOrdinal); Settings.Builder builder = Settings.builder() .put(settings.filter((s) -> s.startsWith("xpack.ssl.") == false)); - - SecuritySettingsSource.addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); - builder.put("resource.reload.interval.high", "1s") - .put("xpack.ssl.keystore.path", nodeStorePath); - - if (builder.get("xpack.ssl.truststore.path") != null) { - builder.put("xpack.ssl.truststore.path", nodeStorePath); - } + builder.put("path.home", createTempDir()) + .put("xpack.ssl.key", nodeKeyPath) + .put("xpack.ssl.key_passphrase", "testnode") + .put("xpack.ssl.certificate", nodeCertPath) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(nodeCertPath.toString(), clientCertPath.toString(), + updateableCertPath.toString())) + .put("resource.reload.interval.high", "1s"); return builder.build(); } @@ -81,25 +92,27 @@ protected boolean transportSSLEnabled() { } public void testThatSSLConfigurationReloadsOnModification() throws Exception { - Path keystorePath = createTempDir().resolve("testnode_updated.jks"); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks"), keystorePath); - X509Certificate certificate = CertParsingUtils.readX509Certificates(Collections.singletonList(getDataPath - ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt")))[0]; + Path keyPath = createTempDir().resolve("testnode_updated.pem"); + Path certPath = createTempDir().resolve("testnode_updated.crt"); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), keyPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), certPath); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.keystore.path", keystorePath) - .put("xpack.ssl.truststore.path", 
nodeStorePath) - .setSecureSettings(secureSettings) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(nodeCertPath.toString(), clientCertPath.toString(), + updateableCertPath.toString())) + .setSecureSettings(secureSettings) + .build(); String node = randomFrom(internalCluster().getNodeNames()); SSLService sslService = new SSLService(settings, TestEnvironment.newEnvironment(settings)); SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.ssl"); SSLSocketFactory sslSocketFactory = sslService.sslSocketFactory(sslConfiguration); TransportAddress address = internalCluster() - .getInstance(Transport.class, node).boundAddress().publishAddress(); + .getInstance(Transport.class, node).boundAddress().publishAddress(); + // Fails as our nodes do not trust testnode_updated.crt try (SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket(address.getAddress(), address.getPort())) { assertThat(socket.isConnected(), is(true)); socket.startHandshake(); @@ -107,19 +120,11 @@ public void testThatSSLConfigurationReloadsOnModification() throws Exception { } catch (SSLHandshakeException | SocketException expected) { logger.trace("expected exception", expected); } - KeyStore nodeStore = KeyStore.getInstance("jks"); - try (InputStream in = Files.newInputStream(nodeStorePath)) { - nodeStore.load(in, "testnode".toCharArray()); - } - nodeStore.setCertificateEntry("newcert", certificate); - Path path = nodeStorePath.getParent().resolve("updated.jks"); - try (OutputStream out = Files.newOutputStream(path)) { - nodeStore.store(out, "testnode".toCharArray()); - } + // Copy testnode_updated.crt to the placeholder updateable.crt so that the nodes will start trusting it now try { - Files.move(path, nodeStorePath, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE); + Files.move(certPath, updateableCertPath, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE); } catch (AtomicMoveNotSupportedException e) { - Files.move(path, nodeStorePath, StandardCopyOption.REPLACE_EXISTING); + Files.move(certPath, updateableCertPath, StandardCopyOption.REPLACE_EXISTING); } CountDownLatch latch = new CountDownLatch(1); assertBusy(() -> { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java index beebf928fcf27..cf77ca975a4df 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java @@ -72,6 +72,7 @@ protected int maxNumberOfNodes() { @BeforeClass public static void setupCertificates() throws Exception { + assumeFalse("Can't run in a FIPS JVM, custom TrustManager implementations cannot be used.", inFipsJvm()); configPath = createTempDir(); Path caCertPath = PathUtils.get(SSLTrustRestrictionsTests.class.getResource ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/nodes/ca.crt").toURI()); diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt new file mode 100644 index 0000000000000..2dbb06c49e6a6 --- /dev/null +++ 
b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIID1zCCAr+gAwIBAgIQWA24rVK7FopAgOHfEio/VjANBgkqhkiG9w0BAQsFADB+ +MRMwEQYKCZImiZPyLGQBGRYDY29tMR0wGwYKCZImiZPyLGQBGRYNZWxhc3RpY3Nl +YXJjaDEUMBIGCgmSJomT8ixkARkWBHRlc3QxEjAQBgoJkiaJk/IsZAEZFgJhZDEe +MBwGA1UEAxMVYWQtRUxBU1RJQ1NFQVJDSEFELUNBMB4XDTE0MDgyNzE2MjI0MloX +DTI5MDgyNzE2MzI0MlowfjETMBEGCgmSJomT8ixkARkWA2NvbTEdMBsGCgmSJomT +8ixkARkWDWVsYXN0aWNzZWFyY2gxFDASBgoJkiaJk/IsZAEZFgR0ZXN0MRIwEAYK +CZImiZPyLGQBGRYCYWQxHjAcBgNVBAMTFWFkLUVMQVNUSUNTRUFSQ0hBRC1DQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALNNZsDJ+lhsE/pCIkNlq6/F +xwv3PU2M+E1/SbWrLEtfbb1ATnn98DwxjpCj00wS0bt26/7zrhHKyX5LaxyS27ER +8bKpLSO4qcVWzDIQnVNk2XfBrYS/Og+6Pi/Lw/ylt/vE++kHWIJBc4O6i+pPByOM +oypM6bh71kTkpK8OTPqf+HiPp0qKhRah6XVtqTc+kOCOku2+wkELbCz8RNzF9ca6 +Uu3YxLi73pNdk0wDTmg6JVaUyVRpSkjJH4BAp9SVma6Rxy6tbh4e5P+8K8lY9ptM +TBzTsDS1EhNK/92xULfQbGT814Z294pF3ARMEJ89N+aegS++kz7CqjciZ1+bA6EC +AwEAAaNRME8wCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FIEKG0KdSVNknKcMZkbTlKo7N8MjMBAGCSsGAQQBgjcVAQQDAgEAMA0GCSqGSIb3 +DQEBCwUAA4IBAQBgbWBXPbEMTEsiVWzoxmTw1wJASBdPahx6CggutjGq3ASjby4p +nVCTwE4xdDEVyFGmeslSp9+23XjBuaiqVPtYw8P8hnG269J0q4cOF/VXOccRLeOw +HVDBv2a7xzgBSwc1KB50TLv07stcBmBYNu8anN6EwGksdgjb8IjRV6U3U+IvFNrI +rGifuIc/iRZD4Clhnpxw8tCsgcrcmz9CU7CN5RxKVEpZ6ou6ZjHO8l8H0t9zWrSI +PL+33iBGHNWlyU63N93XgJtxV1em1hHryLtTTtaVZJJ3R0OrLrUpG8SQ7zCUy62f +YtImFPClUMXY03yH+4DAhflueRvY/D1AKL12 +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt new file mode 100644 index 0000000000000..be5a6d0264579 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDmzCCAoOgAwIBAgIUdwsnIxjgSneHNVKT6JNCCsrQ3T0wDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTgwMjE1MTc0ODQ2WhcNMjEwMjE0MTc0ODQ2WjA0MTIwMAYD +VQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKQAP/hdsWdu3Ck/Zteosity +nmXJTCnkecBYSLtjgYh9rPDFppj9KdsZ7+5P9FvxLv/t4Yw81YI24TrHk0CnMrD/ +QBaXDiPGeT9b6T/gWWVm1zQj2/567gH2UaIkIffy7q09BI9ICXSKDBRXRMLgVR19 +iiJkwWb3b5TVvaQI4M8sEmJIHXei2/cfEKVR5hBprtzeKkvg6o9DXx+nDv2ZEUZ7 +it5pEN5AjD5t0S3ymtlUU5lqnr8er6/Qcrua2EXxE1HyPEkpN/Cwl7tF1ICMdguf +vght5ql1/Pk43VmBMulI/6z5e+7GZ1+x79YA17gabtGJ+onB0zJxgDBj0tto7H8C +AwEAAaOBpDCBoTAdBgNVHQ4EFgQUZo2Y3maL2NoxbbkwRZiC37k6QMEwbwYDVR0j +BGgwZoAUZo2Y3maL2NoxbbkwRZiC37k6QMGhOKQ2MDQxMjAwBgNVBAMTKUVsYXN0 +aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2VuZXJhdGVkIENBghR3CycjGOBKd4c1 +UpPok0IKytDdPTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBf +mkc4bvUR5+We/2rRqCmP4LFnl/LxfbZ9/pUPRdcxuowuK7YfxN8i44VXGpJvLtec +izhA8gvlj6GbYB/GNlHMogqEORbrMlu2o5Cev4HE/pcWpoqtVaDJqI5Hq4763EmJ +p2dXGMmU04H4LtkcCEt3xQfLQ+QIP4Dl2yEsNd248BKSsscCGm9V3vgzFzbdgndo +zUWv9hQCaEsKNtqvnkTqDy2uFjnf+xNoXFr/bI94gvD9HlZHnIC+g0TL5jjtSfCH +gjeXhC2bBKFtlSt4ClIdZTXWievYs6YDRREfaOi4F0757A/gf+hT0fjZ+9WWnUeM +UuvUnl71CNRnJ5JlNKBA +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt new file mode 100644 index 
0000000000000..59ecbd22e8b23 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDoDCCAoigAwIBAgIUMVGoHuyNTjTFaoRmqFELz75jzDEwDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTgwMjE1MTc0OTExWhcNMjEwMjE0MTc0OTExWjARMQ8wDQYD +VQQDEwZzYW1iYTQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtGBwa +n+7JN2vweSUsYh4zPmh8RPIE+nEVjK1lx/rADUBY7UVjfTYC+MVKKiezZe7gYCNT +7JNKazPpgVI9e3ZFKw/UxomLqRuuvn5bTh+1tMs3afY5+GGzi7oPmEbBO3ceg0Hi +rNSTDa1rfroZnRYK8uIeSZacQnAW90plITI7rBBt9jq+W9albFbDybfDgNv+yS/C +rzIsofm4rbFC3SMRYfrT6HvwDhjOmmYKZci5x7tsn0T+3tSiR44Bw5/DgiN5kX3m +/kl9qg1eoYWbCUy1dKmQlb4Nb4uNcxrIugLB3zjBkfhMZ0OHoveKh/lJASTWik9k +xQ9rEYbpsRbuXpsHAgMBAAGjgcwwgckwHQYDVR0OBBYEFJOLa7UXKtLPibgKeFh7 +Kq1+rS0/MG8GA1UdIwRoMGaAFGaNmN5mi9jaMW25MEWYgt+5OkDBoTikNjA0MTIw +MAYDVQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBD +QYIUdwsnIxjgSneHNVKT6JNCCsrQ3T0wLAYDVR0RBCUwI4IJbG9jYWxob3N0hwR/ +AAABhxAAAAAAAAAAAAAAAAAAAAABMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQAD +ggEBAEHqT1WHkcF8DuOgyIBx7wKcUVQ5H1qYYlJ1xgMGrKFFZLUzouLcON7oadEu +HLIJ4Z3AKD3bqWpcls5XJ9MTECGR48tou67x9cXqTV7jR3Rh0H/VGwzwhR85vbpu +o8ielOPL8XAQOfnAFESJii5sfCU4ZwLg+3evmGZdKfhU6rqQtLimgG/Gm96vOJne +y0a/TZTWrfAarithkOHHXSSAhEI5SdW5SlZAytF4AmYqFvafwxe1+NyFwfCRy0Xl +H40WgVsq+z84psU+WyORb3THX5rgB4au9nuMXOqFKAtrJSI/uApncYraaqU28rqB +gYd8XrtjhKOLw+6viqAKu8l7/cs= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index 9d27c2030d676..a0d9b24c50729 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -1,15 +1,3 @@ - -buildscript { - repositories { - maven { - url 'https://plugins.gradle.org/m2/' - } - } - dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' - } -} - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -49,7 +37,6 @@ dependencyLicenses { } shadowJar { - classifier = null relocate 'com.fasterxml', 'org.elasticsearch.fasterxml' } @@ -70,26 +57,3 @@ artifacts { nodeps nodepsJar archives shadowJar } - -publishing { - publications { - nebula(MavenPublication) { - artifact shadowJar - pom.withXml { - // Nebula is mistakenly including all dependencies that are already shadowed into the shadow jar - asNode().remove(asNode().dependencies) - } - } - } -} - -assemble.dependsOn shadowJar - -// Use the jar for testing so the tests are more "real" -test { - classpath -= compileJava.outputs.files - classpath -= configurations.compile - classpath -= configurations.runtime - classpath += shadowJar.outputs.files - dependsOn shadowJar -} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/hash_processor/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/hash_processor/10_basic.yml deleted file mode 100644 index ee84e02d2f498..0000000000000 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/hash_processor/10_basic.yml +++ /dev/null @@ -1,51 +0,0 @@ ---- -teardown: - - do: - ingest.delete_pipeline: - id: "my_pipeline" - ignore: 404 - ---- -"Test Hash Processor": - - - do: - cluster.health: - wait_for_status: yellow - - do: - ingest.put_pipeline: - id: "my_pipeline" - body: > - { - "processors": [ - { - "hash" : { - "fields" : ["user_ssid"], - "target_field" : "anonymized", - "salt": "_salt", - "iterations": 5, - "method": "sha1", - "key_setting": 
"xpack.security.ingest.hash.processor.key" - } - } - ] - } - - match: { acknowledged: true } - - - do: - index: - index: test - type: test - id: 1 - pipeline: "my_pipeline" - body: > - { - "user_ssid": "0123456789" - } - - - do: - get: - index: test - type: test - id: 1 - - match: { _source.anonymized: "X3NhbHQMW0oHJGEEE9obGcGv5tGd7HFyDw==" } - diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/12_index_alias.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/12_index_alias.yml index 44d91d691e1c2..1e947c5639d77 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/12_index_alias.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/12_index_alias.yml @@ -310,3 +310,74 @@ teardown: index: write_index_2 body: { "query": { "terms": { "_id": [ "19" ] } } } - match: { hits.total: 1 } + +--- +"Test bulk indexing into an alias when resolved to write index": + - do: + indices.update_aliases: + body: + actions: + - add: + index: write_index_2 + alias: can_write_2 + is_write_index: true + - add: + index: write_index_2 + alias: can_read_2 + is_write_index: true + - add: + index: write_index_1 + alias: can_write_3 + is_write_index: true + - add: + index: write_index_2 + alias: can_write_3 + is_write_index: false + + - do: + headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user + bulk: + refresh: true + body: + - '{"index": {"_index": "can_read_1", "_type": "doc", "_id": "20"}}' + - '{"name": "doc20"}' + - '{"index": {"_index": "can_write_1", "_type": "doc", "_id": "21"}}' + - '{"name": "doc21"}' + - '{"index": {"_index": "can_read_2", "_type": "doc", "_id": "22"}}' + - '{"name": "doc22"}' + - '{"index": {"_index": "can_write_2", "_type": "doc", "_id": "23"}}' + - '{"name": "doc23"}' + - '{"index": {"_index": "can_write_3", "_type": "doc", "_id": "24"}}' + - '{"name": "doc24"}' + - '{"update": {"_index": "can_write_3", "_type": "doc", "_id": "24"}}' + - '{"doc": { "name": "doc_24"}}' + - '{"delete": {"_index": "can_write_3", "_type": "doc", "_id": "24"}}' + - match: { errors: true } + - match: { items.0.index.status: 403 } + - match: { items.0.index.error.type: "security_exception" } + - match: { items.1.index.status: 201 } + - match: { items.2.index.status: 403 } + - match: { items.2.index.error.type: "security_exception" } + - match: { items.3.index.status: 403 } + - match: { items.3.index.error.type: "security_exception" } + - match: { items.4.index.status: 201 } + - match: { items.5.update.status: 200 } + - match: { items.6.delete.status: 200 } + + - do: # superuser + search: + index: write_index_1 + body: { "query": { "terms": { "_id": [ "21" ] } } } + - match: { hits.total: 1 } + + - do: + indices.delete_alias: + index: "write_index_2" + name: [ "can_write_2", "can_read_2" ] + ignore: 404 + + - do: + indices.delete_alias: + index: "write_index_1" + name: [ "can_write_3" ] + ignore: 404 diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java index 0386ca47c3498..6fe93aec3a4a2 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java @@ -9,6 +9,8 @@ import org.elasticsearch.common.settings.Settings; import 
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.rest.BytesRestResponse;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
@@ -17,8 +19,6 @@
 import org.elasticsearch.rest.action.RestBuilderListener;
 import org.elasticsearch.xpack.core.security.rest.RestRequestFilter;
 import org.elasticsearch.xpack.core.watcher.client.WatcherClient;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler;
 
 import java.io.IOException;
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java
index 5be1236574f37..cbee4c7dc947b 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java
@@ -19,13 +19,13 @@
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.ClientHelper;
 import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherParams;
 import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchAction;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.core.watcher.watch.Watch;
 import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction;
 import org.elasticsearch.xpack.watcher.watch.WatchParser;
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionErrorIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionErrorIntegrationTests.java
index 144a0a75b1516..6590d6c271b26 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionErrorIntegrationTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionErrorIntegrationTests.java
@@ -8,9 +8,9 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource;
 import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.watcher.actions.index.IndexAction;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
 
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java
index c0be6c1c17f6b..9b851131d7d6d 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java
@@ -8,13 +8,13 @@
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.sort.SortBuilders;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField;
 import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
 
 import java.util.Map;
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java
index 05256ba5fc476..b2a1e7bb2cd58 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.xpack.core.watcher.actions.Action;
 import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder;
 import org.elasticsearch.xpack.core.watcher.execution.ActionExecutionMode;
@@ -17,7 +18,6 @@
 import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath;
 import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchRequestBuilder;
 import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchResponse;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest;
 import org.elasticsearch.xpack.core.watcher.watch.Watch;
 import org.elasticsearch.xpack.watcher.actions.email.EmailAction;
 import org.elasticsearch.xpack.watcher.actions.index.IndexAction;
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java
index 9858a5cd11851..a69445386d027 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java
@@ -44,12 +44,14 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest
 
     @Override
     protected Settings nodeSettings(int nodeOrdinal) {
-        Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks");
+        Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem");
+        Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt");
         return Settings.builder()
-                .put(super.nodeSettings(nodeOrdinal))
-                .put("xpack.http.ssl.keystore.path", resource.toString())
-                .put("xpack.http.ssl.keystore.password", "testnode")
-                .build();
+                .put(super.nodeSettings(nodeOrdinal))
+                .put("xpack.http.ssl.key", keyPath)
+                .put("xpack.http.ssl.certificate", certPath)
+                .put("xpack.http.ssl.keystore.password", "testnode")
+                .build();
     }
 
     @Before
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java
index 10618b36e8ae9..03dcd7947155e 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java
@@ -169,30 +169,31 @@ public void testNoPathSpecified() throws Exception {
     }
 
     public void testHttps() throws Exception {
-        Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.jks");
+        Path trustedCertPath = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt");
+        Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt");
+        Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem");
         MockSecureSettings secureSettings = new MockSecureSettings();
         Settings settings;
         if (randomBoolean()) {
-            secureSettings.setString("xpack.http.ssl.truststore.secure_password", "truststore-testnode-only");
             settings = Settings.builder()
-                .put("xpack.http.ssl.truststore.path", resource.toString())
-                .setSecureSettings(secureSettings)
-                .build();
+                    .put("xpack.http.ssl.certificate_authorities", trustedCertPath)
+                    .setSecureSettings(secureSettings)
+                    .build();
         } else {
-            secureSettings.setString("xpack.ssl.truststore.secure_password", "truststore-testnode-only");
             settings = Settings.builder()
-                .put("xpack.ssl.truststore.path", resource.toString())
-                .setSecureSettings(secureSettings)
-                .build();
+                    .put("xpack.ssl.certificate_authorities", trustedCertPath)
+                    .setSecureSettings(secureSettings)
+                    .build();
         }
         try (HttpClient client = new HttpClient(settings, authRegistry, new SSLService(settings, environment))) {
             secureSettings = new MockSecureSettings();
             // We can't use the client created above for the server since it is only a truststore
-            secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode");
+            secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode");
             Settings settings2 = Settings.builder()
-                .put("xpack.ssl.keystore.path", getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"))
-                .setSecureSettings(secureSettings)
-                .build();
+                    .put("xpack.ssl.key", keyPath)
+                    .put("xpack.ssl.certificate", certPath)
+                    .setSecureSettings(secureSettings)
+                    .build();
 
             TestsSSLService sslService = new TestsSSLService(settings2, environment);
             testSslMockWebserver(client, sslService.sslContext(), false);
@@ -200,34 +201,40 @@ public void testHttps() throws Exception {
     }
 
     public void testHttpsDisableHostnameVerification() throws Exception {
-        Path resource = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks");
+        Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt");
+        Path keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem");
         Settings settings;
         if (randomBoolean()) {
             MockSecureSettings secureSettings = new MockSecureSettings();
-            secureSettings.setString("xpack.http.ssl.truststore.secure_password", "testnode-no-subjaltname");
-            settings = Settings.builder()
-                .put("xpack.http.ssl.truststore.path", resource.toString())
-                .put("xpack.http.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE))
-                .setSecureSettings(secureSettings)
-                .build();
+            Settings.Builder builder = Settings.builder()
+                    .put("xpack.http.ssl.certificate_authorities", certPath);
+            if (inFipsJvm()) {
+                //Can't use TrustAllConfig in FIPS mode
+                builder.put("xpack.http.ssl.verification_mode", VerificationMode.CERTIFICATE);
+            } else {
+                builder.put("xpack.http.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE));
+            }
+            settings = builder.build();
         } else {
-            MockSecureSettings secureSettings = new MockSecureSettings();
-            secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode-no-subjaltname");
-            settings = Settings.builder()
-                .put("xpack.ssl.truststore.path", resource.toString())
-                .put("xpack.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE))
-                .setSecureSettings(secureSettings)
-                .build();
+            Settings.Builder builder = Settings.builder()
+                    .put("xpack.ssl.certificate_authorities", certPath);
+            if (inFipsJvm()) {
+                //Can't use TrustAllConfig in FIPS mode
+                builder.put("xpack.ssl.verification_mode", VerificationMode.CERTIFICATE);
+            } else {
+                builder.put("xpack.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE));
+            }
+            settings = builder.build();
         }
         try (HttpClient client = new HttpClient(settings, authRegistry, new SSLService(settings, environment))) {
             MockSecureSettings secureSettings = new MockSecureSettings();
             // We can't use the client created above for the server since it only defines a truststore
-            secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode-no-subjaltname");
+            secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode-no-subjaltname");
             Settings settings2 = Settings.builder()
-                .put("xpack.ssl.keystore.path",
-                    getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks"))
-                .setSecureSettings(secureSettings)
-                .build();
+                    .put("xpack.ssl.key", keyPath)
+                    .put("xpack.ssl.certificate", certPath)
+                    .setSecureSettings(secureSettings)
+                    .build();
 
             TestsSSLService sslService = new TestsSSLService(settings2, environment);
             testSslMockWebserver(client, sslService.sslContext(), false);
@@ -235,13 +242,15 @@ public void testHttpsDisableHostnameVerification() throws Exception {
     }
 
     public void testHttpsClientAuth() throws Exception {
-        Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks");
+        Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt");
+        Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem");
         MockSecureSettings secureSettings = new MockSecureSettings();
-        secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode");
+        secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode");
         Settings settings = Settings.builder()
.put("xpack.ssl.keystore.path", resource.toString()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .setSecureSettings(secureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings, environment); try (HttpClient client = new HttpClient(settings, authRegistry, sslService)) { @@ -365,30 +374,31 @@ public void testSetProxy() throws Exception { } public void testProxyCanHaveDifferentSchemeThanRequest() throws Exception { + Path trustedCertPath = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem"); // this test fakes a proxy server that sends a response instead of forwarding it to the mock web server // on top of that the proxy request is HTTPS but the real request is HTTP only MockSecureSettings serverSecureSettings = new MockSecureSettings(); // We can't use the client created above for the server since it is only a truststore - serverSecureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + serverSecureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings serverSettings = Settings.builder() - .put("xpack.ssl.keystore.path", getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks")) - .setSecureSettings(serverSecureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .setSecureSettings(serverSecureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(serverSettings, environment); try (MockWebServer proxyServer = new MockWebServer(sslService.sslContext(), false)) { proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody("fullProxiedContent")); proxyServer.start(); - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.jks"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.http.ssl.truststore.secure_password", "truststore-testnode-only"); Settings settings = Settings.builder() .put(HttpSettings.PROXY_HOST.getKey(), "localhost") .put(HttpSettings.PROXY_PORT.getKey(), proxyServer.getPort()) .put(HttpSettings.PROXY_SCHEME.getKey(), "https") - .put("xpack.http.ssl.truststore.path", resource.toString()) - .setSecureSettings(secureSettings) + .put("xpack.http.ssl.certificate_authorities", trustedCertPath) .build(); HttpRequest.Builder requestBuilder = HttpRequest.builder("localhost", webServer.getPort()) diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java index 189bf1d5b05ca..9093a6f86ae56 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java @@ -7,6 +7,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -18,7 +19,6 @@ import 
org.elasticsearch.xpack.core.watcher.condition.ExecutableCondition; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.input.Input; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.watcher.condition.CompareCondition; import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.condition.NeverCondition; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index 38008194b0ab1..2f578a1b880c3 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -7,12 +7,12 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.notification.email.EmailTemplate; import org.elasticsearch.xpack.watcher.notification.email.support.EmailServer; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index 51652078dc96f..0e120793fbc18 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.http.MockResponse; @@ -18,7 +19,6 @@ import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.watcher.common.http.HttpMethod; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java index 6bf273b194c34..e7b6ad9223105 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.watcher.history; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java index 17575f58ec298..69de3bcf8714c 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java @@ -8,11 +8,11 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java index 2259ad87eb17a..0ad674f7ed218 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java @@ -10,9 +10,9 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import 
org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java index 9ae83c79e9577..4ca412c5ce0da 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java @@ -11,12 +11,12 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest; import org.elasticsearch.xpack.watcher.Watcher; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.condition.ScriptCondition; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java index 1c1d8dde8cfdb..18f151b4f6afb 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -20,7 +21,6 @@ import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.condition.CompareCondition; import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/ExecutionVarsIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/ExecutionVarsIntegrationTests.java index 2f69cc95a50ef..0b3c0fc28ec05 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/ExecutionVarsIntegrationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/ExecutionVarsIntegrationTests.java @@ -8,12 +8,12 @@ import org.elasticsearch.action.search.SearchResponse; import 
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.script.MockScriptPlugin;
 import org.elasticsearch.xpack.core.watcher.client.WatcherClient;
 import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath;
 import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource;
 import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchResponse;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.watcher.condition.ScriptCondition;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
 import org.hamcrest.Matcher;
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java
index de48c240a3a77..5c9dafeaca001 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.sort.SortBuilders;
 import org.elasticsearch.search.sort.SortOrder;
@@ -17,7 +18,6 @@
 import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder;
 import org.elasticsearch.xpack.core.watcher.input.Input;
 import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.core.watcher.watch.WatchStatus;
 import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java
index d98d6a44daf37..1ae49265352f7 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.xpack.core.watcher.actions.ActionStatus;
 import org.elasticsearch.xpack.core.watcher.execution.ExecutionState;
@@ -23,7 +24,6 @@
 import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchResponse;
 import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest;
 import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.core.watcher.watch.Watch;
 import org.elasticsearch.xpack.watcher.condition.CompareCondition;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java
index 1f7335aef0df8..d629c54934fc1 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java
@@ -11,10 +11,10 @@
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.script.MockScriptPlugin;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptType;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition;
 import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java
index b1fa736292cc4..64b913f96a0df 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java
@@ -9,6 +9,7 @@
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.core.watcher.execution.ActionExecutionMode;
 import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRequest;
@@ -16,7 +17,6 @@
 import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest;
 import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchRequest;
 import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest;
 
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.Matchers.is;
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java
index 5da3f05177aae..1080880ea8bbb 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java
@@ -15,13 +15,13 @@
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.xpack.core.watcher.client.WatcherClient;
 import org.elasticsearch.xpack.core.watcher.execution.ExecutionState;
 import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource;
 import org.elasticsearch.xpack.core.watcher.transport.actions.activate.ActivateWatchResponse;
 import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsResponse;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
 
@@ -47,7 +47,7 @@ public class ActivateWatchTests extends AbstractWatcherIntegrationTestCase {
 
     protected boolean timeWarped() {
         return false;
     }
-    
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30699")
     public void testDeactivateAndActivate() throws Exception {
         PutWatchResponse putWatchResponse = watcherClient().preparePutWatch()
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java
index b50204abf4176..76f71b9c95ed4 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java
@@ -8,6 +8,7 @@
 import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.test.http.MockResponse;
 import org.elasticsearch.test.http.MockWebServer;
 import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField;
@@ -15,7 +16,6 @@
 import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse;
 import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchResponse;
 import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
 
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java
index c307764c29f0d..53d5458706a63 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.watcher.transport.action.execute;
 
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.xpack.core.watcher.actions.ActionStatus;
 import org.elasticsearch.xpack.core.watcher.client.WatcherClient;
 import org.elasticsearch.xpack.core.watcher.execution.ActionExecutionMode;
@@ -14,7 +15,6 @@
 import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRequestBuilder;
 import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchResponse;
 import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchResponse;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.core.watcher.watch.WatchStatus;
 import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java
index 9d13a2ec2a1d8..226879b5e3047 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java
@@ -7,11 +7,11 @@
 
 import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
 import org.elasticsearch.index.IndexNotFoundException;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource;
 import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest;
 import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse;
 import org.elasticsearch.xpack.core.watcher.watch.Watch;
 import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition;
 import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase;
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/put/PutWatchSerializationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/put/PutWatchSerializationTests.java
index a26022d97aeb3..df3a324bdac17 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/put/PutWatchSerializationTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/put/PutWatchSerializationTests.java
@@ -11,8 +11,8 @@
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest;
 
 import static org.hamcrest.Matchers.is;
 
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java
index 6dcd5f199d538..329d9d976a960 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java
@@ -15,11 +15,11 @@
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.ClientHelper;
-import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest;
 import org.elasticsearch.xpack.core.watcher.watch.ClockMock;
 import org.elasticsearch.xpack.core.watcher.watch.Watch;
 import org.elasticsearch.xpack.watcher.test.WatchExecutionContextMockBuilder;
@@ -92,4 +92,4 @@ public void testHeadersAreFilteredWhenPuttingWatches() throws Exception {
         assertThat(capturedHeaders.keySet(), hasSize(1));
         assertThat(capturedHeaders, hasKey(headerName));
     }
-}
\ No newline at end of file
+}
diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt
new file mode 100644
index 0000000000000..37e142afb23fe
--- /dev/null
+++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIJAJ+K5mGS3n/AMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV
+BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp
+Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTQxMjE2MTcwNDQ1WhcNMTgxMjE1MTcwNDQ1
+WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV
+BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAzhpW7iwkm+Og+HP7U00nbmh0Hy9Z2Ldp5i8tJSlSQwTxCCvO
+rse6jwJQN98Dk1ApaSzimZrlKOotFyPV1L3fnOzJbTp1Yq/VsYP4zJkjWtID0qUf
+8Rg8bLhjKAG+ZlLuai5XZqnLkdmqvQeR61VhpXWFm0Om153tWmAiHL18ywY71gXN
+EnkeFo9OW4fDqkz6h7NJziYvU6URSKErZDEixk5GIPv9K9hiIfi0KQM6xaHp0d2w
+VCyFVC0OUdugz6untURzJVx4U3X1bQcv/o2BoUotWh/5h8o5eeiiv2OGZ1XlO+33
+1tweYI4wFjDwnAyHHRr/rk2ZIBiBYGaSzHnuhQIDAQABo1owWDAJBgNVHRMEAjAA
+MB0GA1UdDgQWBBTwGg2LF8+mzsvBBWxJKv6VXv3dMTAsBgNVHREEJTAjgglsb2Nh
+bGhvc3SHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAIwDQYJKoZIhvcNAQELBQADggEB
+ABP4ufLToJhcUselVxV9LPD5VGPEHGLdIFqsUEix7DMsiNpR76X6a8qNQbZpdbd6
++qPKqoaMgC7znX7qZtCqRbIXTWbudZPxFkcHdiWx3SiALMQYabeUGetClX3sCndU
+SUoV8f34i8dJxfNcqhLcsh4zpgxtmwsvs5OLMTBvm0Xo2zUFUjlmrt41pBrWEuq9
+nkObc/cr6Syiz3sy4pYVJO1/YwHaZgE/URqjVlari70DR3ES4YnIUnLQajKx2Q0/
+gXVgzjbe68KPOUGCz6GYiWq+d4tcWdHzLv1GsaqQ1MD9P21ArfrX4DpzgPDrO6MP
+9Ppq5DQGa2q4mz3kipd5RIs=
+-----END CERTIFICATE-----
diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem
new file mode 100644
index 0000000000000..5de9c813fa815
--- /dev/null
+++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEAzhpW7iwkm+Og+HP7U00nbmh0Hy9Z2Ldp5i8tJSlSQwTxCCvO
+rse6jwJQN98Dk1ApaSzimZrlKOotFyPV1L3fnOzJbTp1Yq/VsYP4zJkjWtID0qUf
+8Rg8bLhjKAG+ZlLuai5XZqnLkdmqvQeR61VhpXWFm0Om153tWmAiHL18ywY71gXN
+EnkeFo9OW4fDqkz6h7NJziYvU6URSKErZDEixk5GIPv9K9hiIfi0KQM6xaHp0d2w
+VCyFVC0OUdugz6untURzJVx4U3X1bQcv/o2BoUotWh/5h8o5eeiiv2OGZ1XlO+33
+1tweYI4wFjDwnAyHHRr/rk2ZIBiBYGaSzHnuhQIDAQABAoIBAQCWgv3A6VPC1DUV
+u/1qFAobwwQqUfYXIbqgcwtQ/CAq+UzcXsGNOiavkUzrwF1oEz5qpHDHJCr9+iX7
+pBvgRNksTG+86NgYvbgc7vee0qbTCFPqXNQ6ySw3aWBgMwXMy/t4Z2dEffNAC+l4
+KjMR3UR2BKERhhItnBNd0J6Yxnh/+jg1Uf5fVMEh1/WOGLYCJnnn5oEHNKeon6XR
+dobIn2QjD/PB8ZX7UubrSXmyezU0e9h3ARoI3oPMV6f8XQSa5K/KRrk8FUkVQ4vI
+5+YAMjtY/K2I8xAEoPyprD/ILAVN+3E47J0K14EfKNTajSzQFVJhaaCvs7btxScA
+Sx/zRsvxAoGBAP5KMH6vamdnBlZTPT2jtrsmzjyC0Z+9lbNokzRmVribps+DFdAW
+YsGCbfApcbOYmpdLSeccFTA+uT5IbQ8hwBbWn/HKm+y8EDAPklf5tL0+w7pCZ4kU
+50pKk6cjSTv/CDjO+hy4KIz2H/zXivXEV+4FtFKOZ3qUVg7m+1c/u5lDAoGBAM99
+L8/S9jwCkOjv+TKhmK+2/S5tVy1fdjlurTu9nI46CYa9MaOndZKY6EJ9ekBLIHUQ
+h1QAsdPRHgkObuKDUHmpLr7qmoTRE7vtWC3sHK382j5CBEK00p+09wFHA03Bf40f
+Jdjlzqe9F9jO6LH2RL/TECQDe7RJaTOQJrNlVtiXAoGBAOUUsNtv68t7ZJogIuuE
+sPmo2+Jnd7EQeexGKVbrWvS0RHJtBRmRESaC+ceBjozczWe+y7UH946e8wLI/HbF
+UOdCMpUAkbeTNIIXhR78NXbHNEx3xg4YZsTmql3HzBHgjueejnOQ8/cJQ4fkJauC
+VjR3rxswbshfGagTLhpLsBVBAoGBAMBf5mN+ynRLQMXoMRlDgIhyVf2kvO5BkyCe
+wBkirTOlFc4KPirpCXZ5NObo5d8UiKxhPcehuT6VpY5qBl8XtxaFyOSUKd24594W
+qeox/0lFpaeRl9etRZdztoxFpgeCv1s9pN6b+2XESYboGBFgLs/XxiBN5nT6l4KK
+RYeRDttTAoGAMoAreVa/i1l5YChhyskBTt+nePHGomsXC9tv7mZFLOrPQ+CLy5Xd
+4PQqqYjRaJT/aP3N/q0NcauSKxYKmgnhInXpaasSVzGrM60DQLVw+SXfTiwXN0dH
+V/bq2ybdSxEh2xQoyrfpiFDkCEecY0nYCL1Ff7UYY6g8P/Qj8DBiZGI=
+-----END RSA PRIVATE KEY-----
diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt
new file mode 100644
index 0000000000000..37e142afb23fe
--- /dev/null
+++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIJAJ+K5mGS3n/AMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV
+BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp
+Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTQxMjE2MTcwNDQ1WhcNMTgxMjE1MTcwNDQ1
+WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV
+BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAzhpW7iwkm+Og+HP7U00nbmh0Hy9Z2Ldp5i8tJSlSQwTxCCvO
+rse6jwJQN98Dk1ApaSzimZrlKOotFyPV1L3fnOzJbTp1Yq/VsYP4zJkjWtID0qUf
+8Rg8bLhjKAG+ZlLuai5XZqnLkdmqvQeR61VhpXWFm0Om153tWmAiHL18ywY71gXN
+EnkeFo9OW4fDqkz6h7NJziYvU6URSKErZDEixk5GIPv9K9hiIfi0KQM6xaHp0d2w
+VCyFVC0OUdugz6untURzJVx4U3X1bQcv/o2BoUotWh/5h8o5eeiiv2OGZ1XlO+33
+1tweYI4wFjDwnAyHHRr/rk2ZIBiBYGaSzHnuhQIDAQABo1owWDAJBgNVHRMEAjAA
+MB0GA1UdDgQWBBTwGg2LF8+mzsvBBWxJKv6VXv3dMTAsBgNVHREEJTAjgglsb2Nh
+bGhvc3SHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAIwDQYJKoZIhvcNAQELBQADggEB
+ABP4ufLToJhcUselVxV9LPD5VGPEHGLdIFqsUEix7DMsiNpR76X6a8qNQbZpdbd6
++qPKqoaMgC7znX7qZtCqRbIXTWbudZPxFkcHdiWx3SiALMQYabeUGetClX3sCndU
+SUoV8f34i8dJxfNcqhLcsh4zpgxtmwsvs5OLMTBvm0Xo2zUFUjlmrt41pBrWEuq9
+nkObc/cr6Syiz3sy4pYVJO1/YwHaZgE/URqjVlari70DR3ES4YnIUnLQajKx2Q0/
+gXVgzjbe68KPOUGCz6GYiWq+d4tcWdHzLv1GsaqQ1MD9P21ArfrX4DpzgPDrO6MP
+9Ppq5DQGa2q4mz3kipd5RIs=
+-----END CERTIFICATE-----
diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt
new file mode 100644
index 0000000000000..ced9d81d96fa6
--- /dev/null
+++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDTTCCAjWgAwIBAgIJALL7dwEsWamvMA0GCSqGSIb3DQEBCwUAME8xDDAKBgNV
+BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEnMCUGA1UEAxMeRWxhc3Rp
+Y3NlYXJjaCBUZXN0IE5vZGUgTm8gU0FOMB4XDTE0MTIxNjE5NTcyNloXDTE4MTIx
+NTE5NTcyNlowTzEMMAoGA1UEChMDb3JnMRYwFAYDVQQLEw1lbGFzdGljc2VhcmNo
+MScwJQYDVQQDEx5FbGFzdGljc2VhcmNoIFRlc3QgTm9kZSBObyBTQU4wggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCkIGS7A/V6TesR34ajMyNYL3tB1OjW
+Raq4KtF8FfW1H6nHGrWa/qXjZWPirczy1k2n6ZL7YOCcv/YeY8xAqC9mGQxvEuqo
+EaqXq2cjRdAs/7zqzRkdPPi3Jw/p/RHrDfOAzOsMnBGc0G2Hrsj//aP44vp85pek
+fM3t2kNAYZWYCzXUqWAIUoxBDK4DcQdsN8H4KTMIwQEEiRtcKnL/b8QGKsyGLfLq
+36ZABHZ4kY2SmcP3bWxZtbFN4hamdwoAtYe+lS0/ee8/fOTLyZ3Ey+X6EEmGO1lk
+WR4XLli15k1L2HBzWGG7zwxVEC5r2h3Sx1njYh/Jq3khIdSvDbiMmM+VAgMBAAGj
+LDAqMAkGA1UdEwQCMAAwHQYDVR0OBBYEFGm8wrYF9mJweJ1vloDw19e0PUuIMA0G
+CSqGSIb3DQEBCwUAA4IBAQBbEZ73weDphNIcmvN25v6NIfjBebqgm0/2grDFwmZe
+Z1DibzRoVfoQ7WeUqbPS7SHUQ+KzIN1GdfHXhW9r6mmLbtzPv90Q/8zBcNv5HNZZ
+YK+T2r9hoAWEY6nB1fiOJ4udkFMYfAi6LiSxave4IPWp/WIqd0IWtPtkPl+MmG41
+TfRom8TnO+o+VsjgDkY5Q1JDsNQKy1BrtxzIZyz7d1zYKTQ+HXZ4yeYJoVoc3k4y
+6w9eX2zAUZ6Z3d4an6CLr6Hew9Dj2VX1vqCj1a5/VvHZVyVxyh4hg8sHYm7tZOJX
+wN3B5GcKwbbFjaMVBLaMlP62OdGg7tCh61evWm+l06S0
+-----END CERTIFICATE-----
diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem
new file mode 100644
index 0000000000000..b0f7a585d7f9b
--- /dev/null
+++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem
@@ -0,0 +1,30 @@
+-----BEGIN RSA PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: AES-192-CBC,2F36F79E75ACA7803AF1BC1B70C2360C
+
+d4/f7dnpPW9DfhGXuGDx7r56BjQ64iNcsQdrC1CTZB363iAvBBeHaoJFaWpQOUmT
+WCBdM6YmRuMi6z4sMtm8Vre3i+Mu2Jti0LTDXqH8Y6FMpptxAYoFb9hkM0OvUGDJ
+ASLGTa1XKyCzZWOe2IGLtWIS8zkGv/fSXHqDJV8ddq8uLHGZXBxAye5oJUD0654T
+DasIllbiTlBEYlEJ10OyIAspS47n4LWVE5ggyGdiyIgh5s3lAMPO3+yenDU3/dzj
+YF2zHKnfe0Nx2lUQVJRYknQvFCx0WTGH9gNl40FovoOMHyRjEuOHD9PFNvL/TCh6
+9h0jJPWu/SsIiYaIqR0pDgqWdHXHA5Ea3s2+w0YHbv7DqkGXWZdceLUdZtcXCBJk
+P8QL9IWa7VE50SS6wV5uXX9tV5lHzMFsNGkwtGcR1UMU5cXYqckFXgoBqDN0fyWe
+V5iEknSJ4Na+MHv75rvRat0kv6upu9i5NSpYTc5jLHdWZWsYMZ/ZMiMoLBP+KAPT
+DQ3eyph/84BU3DePaQF3Rsp0ZvPxqQ361Zwc4zC5CKbHur1QX8WAY5XlBMfuBpkf
+CKr5wgwF+ZpS7zsfUpMPPe9Y1E8TWnhx/DtCVLEslBpr2u/rMaxPp6ev9/Wry7N+
+UFBOwodipBhlfSvLqjc511L+bXRzqXiINuW0eSKUQv0J/G0Ur894kJJ6feDYlskj
+JiZjOgOyyKhB+K9AXmkfRdvWUJeweL8pgDuYSyQdQ0zoUCZALEoYK2cBWzti/wep
+QPFD5oz8076aXNHKMHLsRmSFuEZb6IN0PtUNVf958EbrtABNIuoufKlKtJsEnUyK
+VHIEUxExEgyECiozKnxvhr7RQ9nTQXhNdgtec6jJblYnla/+OWAfHdxtHhBjp5AX
+WvLyUhmgrmLNdKd1KSzcXynBHgh0hi0HJXYx31FilwbxsdhwN1LwN/Do4T4qGkUr
+InrQC3ZHRuh0yAOPrwRFEWbW5m/PzVP/xYVgFtVWP7w38ftZbaBy5xPmtswn+PH+
+cIMt1Y9PaAlhLNpW/Vfn503T9M+05vu73vbU1xgu/B1kePOqE/WO0cOZl0KdaMmT
+wAQBKuI7qTACH+/8g3Uir1YSChLthH+1Gs6h686EP6ZydwXq9GYXXkNmJNJJsnmU
+RDjoT0F4XBKvcQdX3EeQYs3Af2yZWFDC59c1Ews2dqMK7vy2tYITbx2yn30DBDAl
+xvjp2izzmAgQJEG/RqCYsUHCCEv7wz3tpsSOkFem9IHZpR2h8Rqy88GH9qYOkgwo
++fKSmIgC4RLQXsHuh7RRuyNc2FaWDgRgSxs5V4f9xOSU/ZbUftYWnwEyCwbu3RJp
+CIXQFZhzU2t5l1Eh+x40rwpEJDXBEwmOIUO3x1oOqGZPPEQ674uMal5TRjvdOVGD
+h665Fpo5Xu9EQwQZHYddeRl/7yw8F6LCxBLgHlngKRHHGDUHlTscLfYRqNh+x3jT
+3S8dfaGzlnwdQEx32gyLAV0/nsFnzh1AknFMT8jesIYF7PLiAi67PNyNwRCc7TFp
+jpKvzkDRVP72bivTmCyP5aKR0Q2oIrAw51MMinT6R2VaoR7COjoVbqYsRLwkxu+p
+-----END RSA PRIVATE KEY-----
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java
similarity index 73%
rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequest.java
rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java
index 47bc800cec72b..0bfa7dc7d343f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequest.java
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java
@@ -1,37 +1,54 @@
 /*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
  */
-package org.elasticsearch.xpack.core.watcher.transport.actions.put;
-
+package org.elasticsearch.protocol.xpack.watcher;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ValidateActions;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder;
-import org.elasticsearch.xpack.core.watcher.support.WatcherUtils;
 
 import java.io.IOException;
+import java.util.regex.Pattern;
 
 /**
  * This request class contains the data needed to create a watch along with the name of the watch.
  * The name of the watch will become the ID of the indexed document.
 */
-public class PutWatchRequest extends ActionRequest {
+public final class PutWatchRequest extends ActionRequest {
+
+    private static final Pattern NO_WS_PATTERN = Pattern.compile("\\S+");
 
     private String id;
     private BytesReference source;
-    private boolean active = true;
     private XContentType xContentType = XContentType.JSON;
+    private boolean active = true;
     private long version = Versions.MATCH_ANY;
 
-    public PutWatchRequest() {
+    public PutWatchRequest() {}
+
+    public PutWatchRequest(StreamInput in) throws IOException {
+        readFrom(in);
     }
 
     public PutWatchRequest(String id, BytesReference source, XContentType xContentType) {
@@ -40,8 +57,9 @@ public PutWatchRequest(String id, BytesReference source, XContentType xContentTy
         this.xContentType = xContentType;
     }
 
-    public PutWatchRequest(StreamInput in) throws IOException {
-        super(in);
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
         id = in.readString();
         source = in.readBytesReference();
         active = in.readBoolean();
@@ -80,13 +98,6 @@ public BytesReference getSource() {
         return source;
     }
 
-    /**
-     * Set the source of the watch
-     */
-    public void setSource(WatchSourceBuilder source) {
-        setSource(source.buildAsBytes(XContentType.JSON), XContentType.JSON);
-    }
-
     /**
      * Set the source of the watch
     */
@@ -129,7 +140,7 @@ public ActionRequestValidationException validate() {
         ActionRequestValidationException validationException = null;
         if (id == null) {
             validationException = ValidateActions.addValidationError("watch id is missing", validationException);
-        } else if (WatcherUtils.isValidId(id) == false) {
+        } else if (isValidId(id) == false) {
             validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException);
         }
         if (source == null) {
@@ -141,8 +152,7 @@ public ActionRequestValidationException validate() {
         return validationException;
     }
 
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
+    public static boolean isValidId(String id) {
+        return Strings.isEmpty(id) == false && NO_WS_PATTERN.matcher(id).matches();
     }
 }
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java
new file mode 100644
index 0000000000000..98467f32dd860
--- /dev/null
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class PutWatchResponse extends ActionResponse implements ToXContentObject { + + private static final ObjectParser<PutWatchResponse, Void> PARSER + = new ObjectParser<>("x_pack_put_watch_response", PutWatchResponse::new); + static { + PARSER.declareString(PutWatchResponse::setId, new ParseField("_id")); + PARSER.declareLong(PutWatchResponse::setVersion, new ParseField("_version")); + PARSER.declareBoolean(PutWatchResponse::setCreated, new ParseField("created")); + } + + private String id; + private long version; + private boolean created; + + public PutWatchResponse() { + } + + public PutWatchResponse(String id, long version, boolean created) { + this.id = id; + this.version = version; + this.created = created; + } + + private void setId(String id) { + this.id = id; + } + + private void setVersion(long version) { + this.version = version; + } + + private void setCreated(boolean created) { + this.created = created; + } + + public String getId() { + return id; + } + + public long getVersion() { + return version; + } + + public boolean isCreated() { + return created; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + PutWatchResponse that = (PutWatchResponse) o; + + return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(created, that.created); + } + + @Override + public int hashCode() { + return Objects.hash(id, version, created); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeVLong(version); + out.writeBoolean(created); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + version = in.readVLong(); + created = in.readBoolean(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("_id", id) + .field("_version", version) + .field("created", created) + .endObject(); + } + + public static PutWatchResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + +}
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java new file mode 100644 index 0000000000000..d0aadef161175 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class PutWatchResponseTests extends AbstractXContentTestCase<PutWatchResponse> { + + @Override + protected PutWatchResponse createTestInstance() { + String id = randomAlphaOfLength(10); + long version = randomLongBetween(1, 10); + boolean created = randomBoolean(); + return new PutWatchResponse(id, version, created); + } + + @Override + protected PutWatchResponse doParseInstance(XContentParser parser) throws IOException { + return PutWatchResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +}
diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 7820cbc06f5a0..07529acdb8815 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -7,6 +7,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; @@ -185,23 +186,32 @@ public void testCreateJobsWithIndexNameOption() throws Exception { + "anomaly_detectors/" + jobId2, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON)); assertEquals(200, response.getStatusLine().getStatusCode()); - response = client().performRequest("get", "_aliases"); - assertEquals(200, response.getStatusLine().getStatusCode()); - String responseAsString = responseEntityToString(response); + // With security enabled GET _aliases throws an index_not_found_exception + // if no aliases have been created. In multi-node tests the alias may not + // appear immediately so wait here.
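+ // (assertBusy retries this block until it stops throwing, by default for
+ // up to ten seconds; wrapping the ResponseException in an AssertionError
+ // below is what makes the busy-wait retry on a transient 404 as well.)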
+ assertBusy(() -> { + try { + Response aliasesResponse = client().performRequest("get", "_aliases"); + assertEquals(200, aliasesResponse.getStatusLine().getStatusCode()); + String responseAsString = responseEntityToString(aliasesResponse); + assertThat(responseAsString, + containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName("custom-" + indexName) + "\":{\"aliases\":{")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId1 + "\",\"boost\":1.0}}}}")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId1) + "\":{}")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId2) + + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId2 + "\",\"boost\":1.0}}}}")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId2) + "\":{}")); + } catch (ResponseException e) { + throw new AssertionError(e); + } + }); + Response indicesResponse = client().performRequest("get", "_cat/indices"); + assertEquals(200, indicesResponse.getStatusLine().getStatusCode()); + String responseAsString = responseEntityToString(indicesResponse); assertThat(responseAsString, - containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName("custom-" + indexName) + "\":{\"aliases\":{")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) - + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId1 + "\",\"boost\":1.0}}}}")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId1) + "\":{}")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId2) - + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId2 + "\",\"boost\":1.0}}}}")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId2) + "\":{}")); - - response = client().performRequest("get", "_cat/indices"); - assertEquals(200, response.getStatusLine().getStatusCode()); - responseAsString = responseEntityToString(response); - assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); + containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)))); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)))); @@ -438,7 +448,6 @@ public void testDeleteJobAfterMissingIndex() throws Exception { client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32034") public void testDeleteJobAfterMissingAliases() throws Exception { String jobId = "delete-job-after-missing-alias-job"; String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); @@ -446,15 +455,24 @@ public void testDeleteJobAfterMissingAliases() throws Exception { String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); - Response response = client().performRequest("get", "_cat/aliases"); - assertEquals(200, response.getStatusLine().getStatusCode()); - String responseAsString = 
responseEntityToString(response); - assertThat(responseAsString, containsString(readAliasName)); - assertThat(responseAsString, containsString(writeAliasName)); + // With security enabled cat aliases throws an index_not_found_exception + // if no aliases have been created. In multi-node tests the alias may not + // appear immediately so wait here. + assertBusy(() -> { + try { + Response aliasesResponse = client().performRequest(new Request("get", "_cat/aliases")); + assertEquals(200, aliasesResponse.getStatusLine().getStatusCode()); + String responseAsString = responseEntityToString(aliasesResponse); + assertThat(responseAsString, containsString(readAliasName)); + assertThat(responseAsString, containsString(writeAliasName)); + } catch (ResponseException e) { + throw new AssertionError(e); + } + }); // Manually delete the aliases so that we can test that deletion proceeds // normally anyway - response = client().performRequest("delete", indexName + "/_alias/" + readAliasName); + Response response = client().performRequest("delete", indexName + "/_alias/" + readAliasName); assertEquals(200, response.getStatusLine().getStatusCode()); response = client().performRequest("delete", indexName + "/_alias/" + writeAliasName); assertEquals(200, response.getStatusLine().getStatusCode()); diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java index 9ff80bc739b16..d7a2b857bf359 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java @@ -72,10 +72,14 @@ public void test() throws Exception { openJob(job.getId()); String forecastId = forecast(job.getId(), TimeValue.timeValueHours(3), null); waitForecastToFinish(job.getId(), forecastId); - ForecastRequestStats forecastStats = getForecastStats(job.getId(), forecastId); - assertThat(forecastStats.getMessages(), anyOf(nullValue(), empty())); - assertThat(forecastStats.getMemoryUsage(), greaterThan(0L)); - assertEquals(forecastStats.getRecordCount(), 3L); + // In a multi-node cluster the replica may not be up to date + // so wait for the change + assertBusy(() -> { + ForecastRequestStats forecastStats = getForecastStats(job.getId(), forecastId); + assertThat(forecastStats.getMessages(), anyOf(nullValue(), empty())); + assertThat(forecastStats.getMemoryUsage(), greaterThan(0L)); + assertThat(forecastStats.getRecordCount(), equalTo(3L)); + }); closeJob(job.getId()); diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java index b0142ae141853..43ad4dc0a45a2 100644 --- a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java +++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java @@ -6,12 +6,16 @@ package org.elasticsearch.multi_node; import org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -33,8 +37,8 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isOneOf; @@ -73,6 +77,31 @@ public void clearRollupMetadata() throws Exception { public void testBigRollup() throws Exception { final int numDocs = 200; + String dateFormat = "strict_date_optional_time"; + + // create the rollup-docs source index that the rollup job will read from + try (XContentBuilder builder = jsonBuilder()) { + builder.startObject(); + { + builder.startObject("mappings").startObject("_doc") + .startObject("properties") + .startObject("timestamp") + .field("type", "date") + .field("format", dateFormat) + .endObject() + .startObject("value") + .field("type", "integer") + .endObject() + .endObject() + .endObject().endObject(); + } + builder.endObject(); + final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); + Request req = new Request("PUT", "rollup-docs"); + req.setEntity(entity); + client().performRequest(req); + } + // index documents for the rollup job final StringBuilder bulk = new StringBuilder(); @@ -88,13 +117,15 @@ public void testBigRollup() throws Exception { bulkRequest.addParameter("refresh", "true"); bulkRequest.setJsonEntity(bulk.toString()); client().performRequest(bulkRequest); + // create the rollup job final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test"); + int pageSize = randomIntBetween(2, 50); createRollupJobRequest.setJsonEntity("{" + "\"index_pattern\":\"rollup-*\"," + "\"rollup_index\":\"results-rollup\"," - + "\"cron\":\"*/1 * * * * ?\"," // fast cron and big page size so test runs quickly - + "\"page_size\":20," + + "\"cron\":\"*/1 * * * * ?\"," // fast cron so test runs quickly + + "\"page_size\":" + pageSize + "," + "\"groups\":{" + " \"date_histogram\":{" + " \"field\":\"timestamp\"," @@ -142,7 +173,8 @@ public void testBigRollup() throws Exception { " \"date_histo\": {\n" + " \"date_histogram\": {\n" + " \"field\": \"timestamp\",\n" + - " \"interval\": \"1h\"\n" + + " \"interval\": \"1h\",\n" + + " \"format\": \"date_time\"\n" + " },\n" + " \"aggs\": {\n" + " \"the_max\": {\n" + @@ -226,7 +258,7 @@ private void assertRollUpJob(final String rollupJob) throws Exception { } - private void waitForRollUpJob(final String rollupJob,String[] expectedStates) throws Exception { + private void waitForRollUpJob(final String rollupJob, String[] expectedStates) throws Exception { assertBusy(() -> { final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); Response getRollupJobResponse = client().performRequest(getRollupJobRequest); @@ -317,10 +349,4 @@ private void deleteAllJobs() throws Exception { } } } - - private static String responseEntityToString(Response response) throws Exception { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) { - return
reader.lines().collect(Collectors.joining("\n")); - } - } } diff --git a/x-pack/qa/openldap-tests/build.gradle b/x-pack/qa/openldap-tests/build.gradle index 24976ab6113ab..5c0399a1d9fa2 100644 --- a/x-pack/qa/openldap-tests/build.gradle +++ b/x-pack/qa/openldap-tests/build.gradle @@ -17,6 +17,7 @@ task openLdapFixture { String outputDir = "${project.buildDir}/generated-resources/${project.name}" task copyIdpTrust(type: Copy) { from idpFixtureProject.file('src/main/resources/certs/idptrust.jks'); + from idpFixtureProject.file('src/main/resources/certs/ca.crt'); into outputDir } if (project.rootProject.vagrantSupported) { diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java index 1c9d93873a493..c7a92dccab82c 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java @@ -34,7 +34,6 @@ import java.util.Locale; import java.util.Objects; -import static org.elasticsearch.test.OpenLdapTests.LDAPTRUST_PATH; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -45,22 +44,20 @@ public class OpenLdapUserSearchSessionFactoryTests extends ESTestCase { private Settings globalSettings; private ThreadPool threadPool; - private MockSecureSettings globalSecureSettings; + private static final String LDAPCACERT_PATH = "/ca.crt"; @Before public void init() throws Exception { - Path keystore = getDataPath(LDAPTRUST_PATH); + Path caPath = getDataPath(LDAPCACERT_PATH); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. * If we re-use a SSLContext, previously connected sessions can get re-established which breaks hostname * verification tests since a re-established connection does not perform hostname verification. 
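 * (As elsewhere in this change, a PEM CA certificate and
 * xpack.ssl.certificate_authorities replace the JKS truststore below so that
 * the test can also run in a FIPS JVM, where JKS keystores can't be used.)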
*/ - globalSecureSettings = newSecureSettings("xpack.ssl.truststore.secure_password", "changeit"); globalSettings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.truststore.path", keystore) - .setSecureSettings(globalSecureSettings) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.certificate_authorities", caPath) + .build(); threadPool = new TestThreadPool("LdapUserSearchSessionFactoryTests"); } @@ -94,7 +91,6 @@ public void testUserSearchWithBindUserOpenLDAP() throws Exception { .put(globalSettings, false); builder.put(Settings.builder().put(config.settings(), false).normalizePrefix("xpack.security.authc.realms.oldap-test.").build()); final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.merge(globalSecureSettings); if (useSecureBindPassword) { secureSettings.setString("xpack.security.authc.realms.oldap-test.secure_bind_password", OpenLdapTests.PASSWORD); } diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java index f277f5d84b371..b55431dee1b00 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java @@ -177,4 +177,4 @@ protected String bindPassword() { protected String trustPath() { return "/idptrust.jks"; } -} \ No newline at end of file +} diff --git a/x-pack/qa/rolling-upgrade/with-system-key/build.gradle b/x-pack/qa/rolling-upgrade/with-system-key/build.gradle index e69de29bb2d1d..03505e01dedd8 100644 --- a/x-pack/qa/rolling-upgrade/with-system-key/build.gradle +++ b/x-pack/qa/rolling-upgrade/with-system-key/build.gradle @@ -0,0 +1 @@ +group = "${group}.x-pack.qa.rolling-upgrade.with-system-key" diff --git a/x-pack/qa/rolling-upgrade/without-system-key/build.gradle b/x-pack/qa/rolling-upgrade/without-system-key/build.gradle index e69de29bb2d1d..aa7ac502eb3e6 100644 --- a/x-pack/qa/rolling-upgrade/without-system-key/build.gradle +++ b/x-pack/qa/rolling-upgrade/without-system-key/build.gradle @@ -0,0 +1 @@ +group = "${group}.x-pack.qa.rolling-upgrade.without-system-key" diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java index e11b62642eb57..dde0b7645df13 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java +++ b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.Name; import org.hamcrest.Matchers; import org.junit.After; +import org.junit.BeforeClass; import javax.security.auth.x500.X500Principal; @@ -96,6 +97,11 @@ private Path initTempDir() throws Exception { return tempDir; } + @BeforeClass + public static void checkFipsJvm() { + assumeFalse("Can't run in a FIPS JVM, depends on Non FIPS BouncyCastle", inFipsJvm()); + } + @After public void tearDown() throws Exception { IOUtils.close(jimfs); diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java 
b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java index 795dd074a80d7..706d5dbab5f5a 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java +++ b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java @@ -47,6 +47,7 @@ import org.elasticsearch.xpack.core.ssl.CertificateTool.Name; import org.hamcrest.Matchers; import org.junit.After; +import org.junit.BeforeClass; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.TrustManagerFactory; @@ -108,6 +109,11 @@ private Path initTempDir() throws Exception { return tempDir; } + @BeforeClass + public static void checkFipsJvm() { + assumeFalse("Can't run in a FIPS JVM, depends on Non FIPS BouncyCastle", inFipsJvm()); + } + @After public void tearDown() throws Exception { IOUtils.close(jimfs);
diff --git a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java index 52c2a7dfa2d2d..c427d8bf32c86 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java +++ b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java @@ -127,7 +127,6 @@ private boolean getMonitoringUsageExportersDefined() throws Exception { return monitoringUsage.get().getExporters().isEmpty() == false; } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31940") public void testHTTPExporterWithSSL() throws Exception { // Ensures that the exporter is actually on assertBusy(() -> assertThat("[_http] exporter is not defined", getMonitoringUsageExportersDefined(), is(true)));
diff --git a/x-pack/qa/sql/security/build.gradle b/x-pack/qa/sql/security/build.gradle index 5c3169d9d20dc..15f7734f9422e 100644 --- a/x-pack/qa/sql/security/build.gradle +++ b/x-pack/qa/sql/security/build.gradle @@ -4,6 +4,8 @@ dependencies { Project mainProject = project +group = "${group}.x-pack.qa.sql.security" + subprojects { // Use resources from the parent project in subprojects sourceSets {
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java index 98594917129f2..32452a609e245 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java @@ -40,7 +40,7 @@ public class ADLdapUserSearchSessionFactoryTests extends AbstractActiveDirectory @Before public void init() throws Exception { - Path keystore = getDataPath("support/ADtrust.jks"); + Path certPath = getDataPath("support/smb_ca.crt"); Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext.
@@ -49,10 +49,9 @@ public void init() throws Exception { */ globalSettings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.truststore.path", keystore) - .setSecureSettings(newSecureSettings("xpack.ssl.truststore.secure_password", "changeit")) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.certificate_authorities", certPath) + .build(); sslService = new SSLService(globalSettings, env); threadPool = new TestThreadPool("ADLdapUserSearchSessionFactoryTests"); }
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java index 7ef1bd674a32b..829e87c849df6 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java @@ -23,9 +23,16 @@ import org.elasticsearch.xpack.core.ssl.VerificationMode; import org.junit.Before; +import java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; import java.security.AccessController; import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.List; public abstract class AbstractActiveDirectoryTestCase extends ESTestCase { @@ -48,11 +55,25 @@ public abstract class AbstractActiveDirectoryTestCase extends ESTestCase { protected SSLService sslService; protected Settings globalSettings; protected boolean useGlobalSSL; + protected List<String> certificatePaths; @Before public void initializeSslSocketFactory() throws Exception { useGlobalSSL = randomBoolean(); - Path truststore = getDataPath("../ldap/support/ADtrust.jks"); + // We use certificates in PEM format and `ssl.certificate_authorities` instead of ssl.truststore + // so that these tests can also run in a FIPS JVM where JKS keystores can't be used. + certificatePaths = new ArrayList<>(); + Files.walkFileTree(getDataPath + ("../ldap/support"), new SimpleFileVisitor<Path>() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + String fileName = file.getFileName().toString(); + if (fileName.endsWith(".crt")) { + certificatePaths.add(getDataPath("../ldap/support/" + fileName).toString()); + } + return FileVisitResult.CONTINUE; + } + }); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext.
* If we re-use a SSLContext, previously connected sessions can get re-established which breaks hostname @@ -60,20 +81,16 @@ public void initializeSslSocketFactory() throws Exception { */ Settings.Builder builder = Settings.builder().put("path.home", createTempDir()); if (useGlobalSSL) { - builder.put("xpack.ssl.truststore.path", truststore) - .put("xpack.ssl.truststore.password", "changeit"); + builder.putList("xpack.ssl.certificate_authorities", certificatePaths); // fake realm to load config with certificate verification mode - builder.put("xpack.security.authc.realms.bar.ssl.truststore.path", truststore); - builder.put("xpack.security.authc.realms.bar.ssl.truststore.password", "changeit"); + builder.putList("xpack.security.authc.realms.bar.ssl.certificate_authorities", certificatePaths); builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } else { // fake realms so ssl will get loaded - builder.put("xpack.security.authc.realms.foo.ssl.truststore.path", truststore); - builder.put("xpack.security.authc.realms.foo.ssl.truststore.password", "changeit"); + builder.putList("xpack.security.authc.realms.foo.ssl.certificate_authorities", certificatePaths); builder.put("xpack.security.authc.realms.foo.ssl.verification_mode", VerificationMode.FULL); - builder.put("xpack.security.authc.realms.bar.ssl.truststore.path", truststore); - builder.put("xpack.security.authc.realms.bar.ssl.truststore.password", "changeit"); + builder.putList("xpack.security.authc.realms.bar.ssl.certificate_authorities", certificatePaths); builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } globalSettings = builder.build(); @@ -99,8 +116,7 @@ Settings buildAdSettings(String ldapUrl, String adDomainName, String userSearchD builder.put(SessionFactorySettings.HOSTNAME_VERIFICATION_SETTING, hostnameVerification); } if (useGlobalSSL == false) { - builder.put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit"); + builder.putList("ssl.certificate_authorities", certificatePaths); } return builder.build(); }
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index 11da59e44d6fe..d6b2e321d6c8b 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.ldap.ActiveDirectorySessionFactorySettings; @@ -100,7 +99,8 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase ) }; - protected static final String TESTNODE_KEYSTORE = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"; + protected static final String TESTNODE_KEY = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"; + protected static final String TESTNODE_CERT = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"; protected static RealmConfig realmConfig; protected static List<RoleMappingEntry> roleMappings; protected static boolean useGlobalSSL; @@ -122,7 +122,8 @@ public static void cleanupRealm() { @Override protected Settings nodeSettings(int nodeOrdinal) { final RealmConfig realm = AbstractAdLdapRealmTestCase.realmConfig; - Path store = getDataPath(TESTNODE_KEYSTORE); + final Path nodeCert = getDataPath(TESTNODE_CERT); + final Path nodeKey = getDataPath(TESTNODE_KEY); Settings.Builder builder = Settings.builder(); // don't use filter since it returns a prefixed secure setting instead of mock! Settings settingsToAdd = super.nodeSettings(nodeOrdinal); @@ -156,14 +157,15 @@ protected Settings nodeSettings(int nodeOrdinal) { } } } - addSslSettingsForStore(builder, store, "testnode"); - builder.put(buildRealmSettings(realm, roleMappings, store)); + addSslSettingsForKeyPair(builder, nodeKey, "testnode", nodeCert, getNodeTrustedCertificates()); + builder.put(buildRealmSettings(realm, roleMappings, getNodeTrustedCertificates())); return builder.build(); } - protected Settings buildRealmSettings(RealmConfig realm, List<RoleMappingEntry> roleMappingEntries, Path store) { + protected Settings buildRealmSettings(RealmConfig realm, List<RoleMappingEntry> roleMappingEntries, List<String> + certificateAuthorities) { Settings.Builder builder = Settings.builder(); - builder.put(realm.buildSettings(store, "testnode")); + builder.put(realm.buildSettings(certificateAuthorities)); configureFileRoleMappings(builder, roleMappingEntries); return builder.build(); } @@ -216,10 +218,11 @@ private List getRoleMappingContent(Function co @Override protected Settings transportClientSettings() { if (useGlobalSSL) { - Path store = getDataPath(TESTNODE_KEYSTORE); + Path key = getDataPath(TESTNODE_KEY); + Path cert = getDataPath(TESTNODE_CERT); Settings.Builder builder = Settings.builder() .put(super.transportClientSettings().filter((s) -> s.startsWith("xpack.ssl.") == false)); - addSslSettingsForStore(builder, store, "testnode"); + addSslSettingsForKeyPair(builder, key, "testnode", cert, getNodeTrustedCertificates()); return builder.build(); } else { return super.transportClientSettings(); } } @@ -304,14 +307,33 @@ protected static String userHeader(String username, String password) { return UsernamePasswordToken.basicAuthHeaderValue(username, new SecureString(password.toCharArray())); } - private void addSslSettingsForStore(Settings.Builder builder, Path store, String password) { - SecuritySettingsSource.addSecureSettings(builder, secureSettings -> { - secureSettings.setString("xpack.ssl.keystore.secure_password", password); - secureSettings.setString("xpack.ssl.truststore.secure_password", password); - }); - builder.put("xpack.ssl.keystore.path", store) - .put("xpack.ssl.verification_mode", "certificate") - .put("xpack.ssl.truststore.path", store); + private void addSslSettingsForKeyPair(Settings.Builder builder, Path key, String keyPassphrase, Path cert, + List<String> certificateAuthorities) { + builder.put("xpack.ssl.key", key) + .put("xpack.ssl.key_passphrase", keyPassphrase) + .put("xpack.ssl.verification_mode", "certificate") + .put("xpack.ssl.certificate", cert) + .putList("xpack.ssl.certificate_authorities", certificateAuthorities); + } + + /** + * Collects all the certificates that are normally trusted by the node (contained in testnode.jks) + */ + List<String> getNodeTrustedCertificates() { +
Path testnodeCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path testnodeClientProfileCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-client-profile.crt"); + Path activedirCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/active-directory-ca.crt"); + Path testclientCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"); + Path openldapCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/openldap.crt"); + Path samba4Cert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt"); + return Arrays.asList(testnodeCert.toString(), testnodeClientProfileCert.toString(), activedirCert.toString(), testclientCert + .toString(), openldapCert.toString(), samba4Cert.toString()); } static class RoleMappingEntry { @@ -429,19 +451,19 @@ enum RealmConfig { this.mapGroupsAsRoles = randomBoolean(); } - public Settings buildSettings(Path store, String password) { - return buildSettings(store, password, 1); + public Settings buildSettings(List<String> certificateAuthorities) { + return buildSettings(certificateAuthorities, 1); } - protected Settings buildSettings(Path store, String password, int order) { + + protected Settings buildSettings(List<String> certificateAuthorities, int order) { Settings.Builder builder = Settings.builder() - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".order", order) - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".hostname_verification", false) - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".unmapped_groups_as_roles", mapGroupsAsRoles) - .put(this.settings); + .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".order", order) + .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".hostname_verification", false) + .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".unmapped_groups_as_roles", mapGroupsAsRoles) + .put(this.settings); if (useGlobalSSL == false) { - builder.put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".ssl.truststore.path", store) - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".ssl.truststore.password", password); + builder.putList(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".ssl.certificate_authorities", certificateAuthorities); } return builder.build();
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java index fb7ea6c5dd754..330ec6b9a758c 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java @@ -132,4 +132,4 @@ protected String bindPassword() { protected String trustPath() { return "/org/elasticsearch/xpack/security/authc/ldap/support/ADtrust.jks"; } -} \ No newline at end of file +}
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index 8f85c250f481b..614d6659f2d80 100644 ---
a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -280,10 +280,9 @@ public void testStandardLdapConnection() throws Exception { .build(); if (useGlobalSSL == false) { settings = Settings.builder() - .put(settings) - .put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit") - .build(); + .put(settings) + .putList("ssl.certificate_authorities", certificatePaths) + .build(); } RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); @@ -317,10 +316,9 @@ public void testHandlingLdapReferralErrors() throws Exception { ignoreReferralErrors); if (useGlobalSSL == false) { settings = Settings.builder() - .put(settings) - .put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit") - .build(); + .put(settings) + .putList("ssl.certificate_authorities", certificatePaths) + .build(); } RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); @@ -346,10 +344,9 @@ public void testStandardLdapWithAttributeGroups() throws Exception { Settings settings = LdapTestCase.buildLdapSettings(new String[] { AD_LDAP_URL }, userTemplate, false); if (useGlobalSSL == false) { settings = Settings.builder() - .put(settings) - .put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit") - .build(); + .put(settings) + .putList("ssl.certificate_authorities", certificatePaths) + .build(); } RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); @@ -408,8 +405,7 @@ private Settings buildAdSettings(String ldapUrl, String adDomainName, boolean ho } if (useGlobalSSL == false) { - builder.put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit"); + builder.putList("ssl.certificate_authorities", certificatePaths); } if (useBindUser) {
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java index c4e07a846fd56..3d1cdb202d19d 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java @@ -10,7 +10,6 @@ import org.junit.BeforeClass; import java.io.IOException; -import java.nio.file.Path; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @@ -46,9 +45,9 @@ protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder(); builder.put(super.nodeSettings(nodeOrdinal)); - Path store = getDataPath(TESTNODE_KEYSTORE); final List<RoleMappingEntry> secondaryRoleMappings = secondaryRealmConfig.selectRoleMappings(() -> true); - final Settings secondarySettings = super.buildRealmSettings(secondaryRealmConfig,
secondaryRoleMappings, store); + final Settings secondarySettings = super.buildRealmSettings(secondaryRealmConfig, secondaryRoleMappings, + getNodeTrustedCertificates()); secondarySettings.keySet().forEach(name -> { String newName = name.replace(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL, XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + "2"); builder.copy(newName, name, secondarySettings);
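 // copies every setting of the secondary realm under the parallel
 // "...external2" realm prefix so both AD realms are configured on the node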